ai 2.2.33 → 2.2.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -280,78 +280,6 @@ type DataMessage = {
      data: JSONValue;
  };

- interface StreamPart<CODE extends string, NAME extends string, TYPE> {
-     code: CODE;
-     name: NAME;
-     parse: (value: JSONValue) => {
-         type: NAME;
-         value: TYPE;
-     };
- }
- declare const textStreamPart: StreamPart<'0', 'text', string>;
- declare const functionCallStreamPart: StreamPart<'1', 'function_call', {
-     function_call: FunctionCall;
- }>;
- declare const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>>;
- declare const errorStreamPart: StreamPart<'3', 'error', string>;
- declare const assistantMessageStreamPart: StreamPart<'4', 'assistant_message', AssistantMessage>;
- declare const assistantControlDataStreamPart: StreamPart<'5', 'assistant_control_data', {
-     threadId: string;
-     messageId: string;
- }>;
- declare const dataMessageStreamPart: StreamPart<'6', 'data_message', DataMessage>;
- declare const toolCallStreamPart: StreamPart<'7', 'tool_calls', {
-     tool_calls: ToolCall[];
- }>;
- declare const messageAnnotationsStreamPart: StreamPart<'8', 'message_annotations', Array<JSONValue>>;
- type StreamPartType = ReturnType<typeof textStreamPart.parse> | ReturnType<typeof functionCallStreamPart.parse> | ReturnType<typeof dataStreamPart.parse> | ReturnType<typeof errorStreamPart.parse> | ReturnType<typeof assistantMessageStreamPart.parse> | ReturnType<typeof assistantControlDataStreamPart.parse> | ReturnType<typeof dataMessageStreamPart.parse> | ReturnType<typeof toolCallStreamPart.parse> | ReturnType<typeof messageAnnotationsStreamPart.parse>;
- /**
-  * The map of prefixes for data in the stream
-  *
-  * - 0: Text from the LLM response
-  * - 1: (OpenAI) function_call responses
-  * - 2: custom JSON added by the user using `Data`
-  * - 6: (OpenAI) tool_call responses
-  *
-  * Example:
-  * ```
-  * 0:Vercel
-  * 0:'s
-  * 0: AI
-  * 0: AI
-  * 0: SDK
-  * 0: is great
-  * 0:!
-  * 2: { "someJson": "value" }
-  * 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
-  * 6: {"tool_call": {"id": "tool_0", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}}
-  *```
-  */
- declare const StreamStringPrefixes: {
-     readonly text: "0";
-     readonly function_call: "1";
-     readonly data: "2";
-     readonly error: "3";
-     readonly assistant_message: "4";
-     readonly assistant_control_data: "5";
-     readonly data_message: "6";
-     readonly tool_calls: "7";
-     readonly message_annotations: "8";
- };
-
- declare const nanoid: (size?: number | undefined) => string;
- declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
- declare function createChunkDecoder(complex: false): (chunk: Uint8Array | undefined) => string;
- declare function createChunkDecoder(complex: true): (chunk: Uint8Array | undefined) => StreamPartType[];
- declare function createChunkDecoder(complex?: boolean): (chunk: Uint8Array | undefined) => StreamPartType[] | string;
-
- declare const isStreamStringEqualToType: (type: keyof typeof StreamStringPrefixes, value: string) => value is `0:${string}\n` | `1:${string}\n` | `2:${string}\n` | `3:${string}\n` | `4:${string}\n` | `5:${string}\n` | `6:${string}\n` | `7:${string}\n` | `8:${string}\n`;
- type StreamString = `${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\n`;
- /**
-  * A header sent to the client so it knows how to handle parsing the stream (as a deprecated text response or using the new prefixed protocol)
-  */
- declare const COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
  declare interface AzureChatCompletions {
      id: string;
      created: Date;
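Note: the stream-part declarations removed in the hunk above are not dropped from the package; the same block reappears further down the file (see the later hunks), so this is a relocation rather than a removal. For orientation, a minimal sketch of how the prefixed stream strings they describe can be type-checked from application code, assuming a protocol line has already been read off the wire; the literal value is illustrative only and the exact payload encoding is defined by the stream parts' `parse` functions.

```ts
import { isStreamStringEqualToType, type StreamString } from 'ai';

// `raw` stands in for one protocol line of the form `<prefix>:<payload>\n`.
const raw: string = '0:"Hello"\n';

if (isStreamStringEqualToType('text', raw)) {
  // The predicate narrows `raw` to the prefixed template-literal union,
  // which is the exported StreamString type.
  const line: StreamString = raw;
  console.log('received stream string:', JSON.stringify(line));
}
```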
@@ -697,6 +625,63 @@ declare function AIStream(response: Response, customParser?: AIStreamParser, cal
   */
  declare function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>): ReadableStream<T>;

+ interface AWSBedrockResponse {
+     body?: AsyncIterable<{
+         chunk?: {
+             bytes?: Uint8Array;
+         };
+     }>;
+ }
+ declare function AWSBedrockAnthropicStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockCohereStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockLlama2Stream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockStream(response: AWSBedrockResponse, callbacks: AIStreamCallbacksAndOptions | undefined, extractTextDeltaFromChunk: (chunk: any) => string): ReadableStream<any>;
+
+ /**
+  * A stream wrapper to send custom JSON-encoded data back to the client.
+  */
+ declare class experimental_StreamData {
+     private encoder;
+     private controller;
+     stream: TransformStream<Uint8Array, Uint8Array>;
+     private isClosedPromise;
+     private isClosedPromiseResolver;
+     private isClosed;
+     private data;
+     private messageAnnotations;
+     constructor();
+     close(): Promise<void>;
+     append(value: JSONValue): void;
+     appendMessageAnnotation(value: JSONValue): void;
+ }
+ /**
+  * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. OpenAIStream has one for function call handling).
+  * This assumes every chunk is a 'text' chunk.
+  */
+ declare function createStreamDataTransformer(experimental_streamData: boolean | undefined): TransformStream<any, any>;
+
+ /**
+  * A utility class for streaming text responses.
+  */
+ declare class StreamingTextResponse extends Response {
+     constructor(res: ReadableStream, init?: ResponseInit, data?: experimental_StreamData);
+ }
+ /**
+  * A utility function to stream a ReadableStream to a Node.js response-like object.
+  */
+ declare function streamToResponse(res: ReadableStream, response: ServerResponse, init?: {
+     headers?: Record<string, string>;
+     status?: number;
+ }): void;
+
+ declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+
+ interface StreamChunk {
+     text?: string;
+     eventType: 'stream-start' | 'search-queries-generation' | 'search-results' | 'text-generation' | 'citation-generation' | 'stream-end';
+ }
+ declare function CohereStream(reader: Response | AsyncIterable<StreamChunk>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+
  interface CompletionChunk {
      /**
       * The resulting completion up to and excluding the stop sequences.
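Note: the hunk above moves the response helpers (`StreamingTextResponse`, `streamToResponse`) and the provider streams into this part of the file. As a hedged sketch of the declared surface (not taken from the package's documentation), any ReadableStream of encoded text can be returned from a web-runtime handler with `StreamingTextResponse`, or piped to a Node.js response object with `streamToResponse`; `makeTextStream` below is a hypothetical helper, not part of the package.

```ts
import { StreamingTextResponse, streamToResponse } from 'ai';
import type { ServerResponse } from 'node:http';

// Hypothetical helper: any ReadableStream of encoded text works here.
function makeTextStream(): ReadableStream<Uint8Array> {
  const encoder = new TextEncoder();
  return new ReadableStream({
    start(controller) {
      controller.enqueue(encoder.encode('Hello from the stream'));
      controller.close();
    },
  });
}

// Edge / web runtime: wrap the stream in a streaming Response.
export function GET(): Response {
  return new StreamingTextResponse(makeTextStream(), {
    headers: { 'X-Custom': 'value' },
  });
}

// Node.js runtime: pipe the same stream into a response-like object.
export function nodeHandler(res: ServerResponse): void {
  streamToResponse(makeTextStream(), res, { status: 200 });
}
```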
@@ -771,56 +756,16 @@ interface MessageStopEvent {
   */
  declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk> | AsyncIterable<MessageStreamEvent>, cb?: AIStreamCallbacksAndOptions): ReadableStream;

- type AssistantResponseSettings = {
-     threadId: string;
-     messageId: string;
+ type InkeepOnFinalMetadata = {
+     chat_session_id: string;
+     records_cited: any;
  };
- type AssistantResponseCallback = (stream: {
-     threadId: string;
-     messageId: string;
-     sendMessage: (message: AssistantMessage) => void;
-     sendDataMessage: (message: DataMessage) => void;
- }) => Promise<void>;
- declare function experimental_AssistantResponse({ threadId, messageId }: AssistantResponseSettings, process: AssistantResponseCallback): Response;
-
- interface AWSBedrockResponse {
-     body?: AsyncIterable<{
-         chunk?: {
-             bytes?: Uint8Array;
-         };
-     }>;
- }
- declare function AWSBedrockAnthropicStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
- declare function AWSBedrockCohereStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
- declare function AWSBedrockLlama2Stream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
- declare function AWSBedrockStream(response: AWSBedrockResponse, callbacks: AIStreamCallbacksAndOptions | undefined, extractTextDeltaFromChunk: (chunk: any) => string): ReadableStream<any>;
-
- declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
-
- interface GenerateContentResponse {
-     candidates?: GenerateContentCandidate[];
- }
- interface GenerateContentCandidate {
-     index: number;
-     content: Content;
- }
- interface Content {
-     role: string;
-     parts: Part[];
- }
- type Part = TextPart | InlineDataPart;
- interface InlineDataPart {
-     text?: never;
- }
- interface TextPart {
-     text: string;
-     inlineData?: never;
- }
- declare function GoogleGenerativeAIStream(response: {
-     stream: AsyncIterable<GenerateContentResponse>;
- }, cb?: AIStreamCallbacksAndOptions): ReadableStream;
-
- declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ type InkeepChatResultCallbacks = {
+     onFinal?: (completion: string, metadata?: InkeepOnFinalMetadata) => Promise<void> | void;
+     onRecordsCited?: (records_cited: InkeepOnFinalMetadata['records_cited']) => void;
+ };
+ type InkeepAIStreamCallbacksAndOptions = AIStreamCallbacksAndOptions & InkeepChatResultCallbacks;
+ declare function InkeepStream(res: Response, callbacks?: InkeepAIStreamCallbacksAndOptions): ReadableStream;

  declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
      stream: ReadableStream<any>;
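Note: the substantive addition in the hunk above is the Inkeep integration; the Assistant, Bedrock, Cohere, Google, and Hugging Face declarations removed here are relocated elsewhere in the file. A minimal sketch of a route handler wiring `InkeepStream` to the new callbacks follows, based only on the declared signatures; the upstream endpoint and request payload are placeholders, not part of this package.

```ts
import { InkeepStream, StreamingTextResponse } from 'ai';

export async function POST(req: Request): Promise<Response> {
  const { messages } = await req.json();

  // Hypothetical upstream call: the Inkeep endpoint and payload shape are
  // placeholders; InkeepStream only needs the streaming Response object.
  const res = await fetch('https://api.inkeep.example/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });

  const stream = InkeepStream(res, {
    // `metadata` carries `chat_session_id` and `records_cited`
    // per the InkeepOnFinalMetadata type declared above.
    async onFinal(completion, metadata) {
      console.log('chat session:', metadata?.chat_session_id);
    },
    onRecordsCited(records_cited) {
      console.log('citations:', records_cited);
    },
  });

  return new StreamingTextResponse(stream);
}
```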
@@ -885,28 +830,112 @@ declare function ReplicateStream(res: Prediction, cb?: AIStreamCallbacksAndOptio
      headers?: Record<string, string>;
  }): Promise<ReadableStream>;

+ interface StreamPart<CODE extends string, NAME extends string, TYPE> {
+     code: CODE;
+     name: NAME;
+     parse: (value: JSONValue) => {
+         type: NAME;
+         value: TYPE;
+     };
+ }
+ declare const textStreamPart: StreamPart<'0', 'text', string>;
+ declare const functionCallStreamPart: StreamPart<'1', 'function_call', {
+     function_call: FunctionCall;
+ }>;
+ declare const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>>;
+ declare const errorStreamPart: StreamPart<'3', 'error', string>;
+ declare const assistantMessageStreamPart: StreamPart<'4', 'assistant_message', AssistantMessage>;
+ declare const assistantControlDataStreamPart: StreamPart<'5', 'assistant_control_data', {
+     threadId: string;
+     messageId: string;
+ }>;
+ declare const dataMessageStreamPart: StreamPart<'6', 'data_message', DataMessage>;
+ declare const toolCallStreamPart: StreamPart<'7', 'tool_calls', {
+     tool_calls: ToolCall[];
+ }>;
+ declare const messageAnnotationsStreamPart: StreamPart<'8', 'message_annotations', Array<JSONValue>>;
+ type StreamPartType = ReturnType<typeof textStreamPart.parse> | ReturnType<typeof functionCallStreamPart.parse> | ReturnType<typeof dataStreamPart.parse> | ReturnType<typeof errorStreamPart.parse> | ReturnType<typeof assistantMessageStreamPart.parse> | ReturnType<typeof assistantControlDataStreamPart.parse> | ReturnType<typeof dataMessageStreamPart.parse> | ReturnType<typeof toolCallStreamPart.parse> | ReturnType<typeof messageAnnotationsStreamPart.parse>;
  /**
-  * A stream wrapper to send custom JSON-encoded data back to the client.
+  * The map of prefixes for data in the stream
+  *
+  * - 0: Text from the LLM response
+  * - 1: (OpenAI) function_call responses
+  * - 2: custom JSON added by the user using `Data`
+  * - 6: (OpenAI) tool_call responses
+  *
+  * Example:
+  * ```
+  * 0:Vercel
+  * 0:'s
+  * 0: AI
+  * 0: AI
+  * 0: SDK
+  * 0: is great
+  * 0:!
+  * 2: { "someJson": "value" }
+  * 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
+  * 6: {"tool_call": {"id": "tool_0", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}}
+  *```
   */
- declare class experimental_StreamData {
-     private encoder;
-     private controller;
-     stream: TransformStream<Uint8Array, Uint8Array>;
-     private isClosedPromise;
-     private isClosedPromiseResolver;
-     private isClosed;
-     private data;
-     private messageAnnotations;
-     constructor();
-     close(): Promise<void>;
-     append(value: JSONValue): void;
-     appendMessageAnnotation(value: JSONValue): void;
- }
+ declare const StreamStringPrefixes: {
+     readonly text: "0";
+     readonly function_call: "1";
+     readonly data: "2";
+     readonly error: "3";
+     readonly assistant_message: "4";
+     readonly assistant_control_data: "5";
+     readonly data_message: "6";
+     readonly tool_calls: "7";
+     readonly message_annotations: "8";
+ };
+
+ declare const nanoid: (size?: number | undefined) => string;
+ declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
+ declare function createChunkDecoder(complex: false): (chunk: Uint8Array | undefined) => string;
+ declare function createChunkDecoder(complex: true): (chunk: Uint8Array | undefined) => StreamPartType[];
+ declare function createChunkDecoder(complex?: boolean): (chunk: Uint8Array | undefined) => StreamPartType[] | string;
+
+ declare const isStreamStringEqualToType: (type: keyof typeof StreamStringPrefixes, value: string) => value is `0:${string}\n` | `1:${string}\n` | `2:${string}\n` | `3:${string}\n` | `4:${string}\n` | `5:${string}\n` | `6:${string}\n` | `7:${string}\n` | `8:${string}\n`;
+ type StreamString = `${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\n`;
  /**
-  * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. OpenAIStream has one for function call handling).
-  * This assumes every chunk is a 'text' chunk.
+  * A header sent to the client so it knows how to handle parsing the stream (as a deprecated text response or using the new prefixed protocol)
   */
- declare function createStreamDataTransformer(experimental_streamData: boolean | undefined): TransformStream<any, any>;
+ declare const COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
+ type AssistantResponseSettings = {
+     threadId: string;
+     messageId: string;
+ };
+ type AssistantResponseCallback = (stream: {
+     threadId: string;
+     messageId: string;
+     sendMessage: (message: AssistantMessage) => void;
+     sendDataMessage: (message: DataMessage) => void;
+ }) => Promise<void>;
+ declare function experimental_AssistantResponse({ threadId, messageId }: AssistantResponseSettings, process: AssistantResponseCallback): Response;
+
+ interface GenerateContentResponse {
+     candidates?: GenerateContentCandidate[];
+ }
+ interface GenerateContentCandidate {
+     index: number;
+     content: Content;
+ }
+ interface Content {
+     role: string;
+     parts: Part[];
+ }
+ type Part = TextPart | InlineDataPart;
+ interface InlineDataPart {
+     text?: never;
+ }
+ interface TextPart {
+     text: string;
+     inlineData?: never;
+ }
+ declare function GoogleGenerativeAIStream(response: {
+     stream: AsyncIterable<GenerateContentResponse>;
+ }, cb?: AIStreamCallbacksAndOptions): ReadableStream;

  /**
   * This is a naive implementation of the streaming React response API.
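Note: with the stream-part protocol now declared in this position, a consumer can decode complex chunks with `createChunkDecoder(true)` and branch on the parsed part type. A minimal sketch based only on the declarations above; the check on `COMPLEX_HEADER` tests for presence of the header, since the exact header value sent by the server is not specified in this file.

```ts
import { COMPLEX_HEADER, createChunkDecoder } from 'ai';

// Assumes `response` is a fetch() Response whose body uses the prefixed
// stream protocol, signalled by the X-Experimental-Stream-Data header.
async function consume(response: Response): Promise<void> {
  if (!response.headers.has(COMPLEX_HEADER) || !response.body) return;

  const decoder = createChunkDecoder(true); // complex mode -> StreamPartType[]
  const reader = response.body.getReader();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    for (const part of decoder(value)) {
      if (part.type === 'text') {
        console.log('text delta:', part.value);
      } else if (part.type === 'data') {
        console.log('custom data:', part.value);
      }
    }
  }
}
```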
@@ -940,18 +969,4 @@ declare class experimental_StreamingReactResponse {
      });
  }

- /**
-  * A utility class for streaming text responses.
-  */
- declare class StreamingTextResponse extends Response {
-     constructor(res: ReadableStream, init?: ResponseInit, data?: experimental_StreamData);
- }
- /**
-  * A utility function to stream a ReadableStream to a Node.js response-like object.
-  */
- declare function streamToResponse(res: ReadableStream, response: ServerResponse, init?: {
-     headers?: Record<string, string>;
-     status?: number;
- }): void;
-
- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataMessage, Function, FunctionCall, FunctionCallHandler, FunctionCallPayload, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, JSONValue, LangChainStream, Message$1 as Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, Tool, ToolCall, ToolCallHandler, ToolCallPayload, ToolChoice, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataMessage, Function, FunctionCall, FunctionCallHandler, FunctionCallPayload, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, LangChainStream, Message$1 as Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, Tool, ToolCall, ToolCallHandler, ToolCallPayload, ToolChoice, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
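Note: the updated export list is where the new Inkeep names become visible to consumers; `GoogleGenerativeAIStream` remains exported after being moved within the file, and its declared input is just an object with a `stream: AsyncIterable<GenerateContentResponse>` field. A hedged sketch of pairing it with the `@google/generative-ai` SDK follows; the SDK calls, the model name, and the environment variable are assumptions outside this package.

```ts
import { GoogleGenerativeAIStream, StreamingTextResponse } from 'ai';
// Assumed peer dependency; not declared in this file.
import { GoogleGenerativeAI } from '@google/generative-ai';

export async function POST(req: Request): Promise<Response> {
  const { prompt } = await req.json();

  // Assumption: generateContentStream() returns an object whose `stream`
  // field is an AsyncIterable of GenerateContentResponse values, which is
  // all GoogleGenerativeAIStream requires per the declaration above.
  const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY ?? '');
  const model = genAI.getGenerativeModel({ model: 'gemini-pro' });
  const streamingResponse = await model.generateContentStream(prompt);

  const stream = GoogleGenerativeAIStream(streamingResponse);
  return new StreamingTextResponse(stream);
}
```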