ai 2.2.0 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -2,94 +2,6 @@ import { ChatCompletionMessage, CompletionCreateParams, CreateChatCompletionRequ
  import { ServerResponse } from 'node:http';
  import { Prediction } from 'replicate';
 
- interface FunctionCallPayload {
- name: string;
- arguments: Record<string, unknown>;
- }
- /**
- * Helper callback methods for AIStream stream lifecycle events
- * @interface
- */
- interface AIStreamCallbacks {
- onStart?: () => Promise<void> | void;
- onCompletion?: (completion: string) => Promise<void> | void;
- onToken?: (token: string) => Promise<void> | void;
- }
- /**
- * Custom parser for AIStream data.
- * @interface
- */
- interface AIStreamParser {
- (data: string): string | void;
- }
- /**
- * Creates a TransformStream that parses events from an EventSource stream using a custom parser.
- * @param {AIStreamParser} customParser - Function to handle event data.
- * @returns {TransformStream<Uint8Array, string>} TransformStream parsing events.
- */
- declare function createEventStreamTransformer(customParser?: AIStreamParser): TransformStream<Uint8Array, string>;
- /**
- * Creates a transform stream that encodes input messages and invokes optional callback functions.
- * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.
- * - `onStart`: Called once when the stream is initialized.
- * - `onToken`: Called for each tokenized message.
- * - `onCompletion`: Called once when the stream is flushed, with the aggregated messages.
- *
- * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.
- *
- * @param {AIStreamCallbacks} [callbacks] - An object containing the callback functions.
- * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.
- *
- * @example
- * const callbacks = {
- * onStart: async () => console.log('Stream started'),
- * onToken: async (token) => console.log(`Token: ${token}`),
- * onCompletion: async (completion) => console.log(`Completion: ${completion}`)
- * };
- * const transformer = createCallbacksTransformer(callbacks);
- */
- declare function createCallbacksTransformer(callbacks: AIStreamCallbacks | undefined): TransformStream<string, Uint8Array>;
- /**
- * Returns a stateful function that, when invoked, trims leading whitespace
- * from the input text. The trimming only occurs on the first invocation, ensuring that
- * subsequent calls do not alter the input text. This is particularly useful in scenarios
- * where a text stream is being processed and only the initial whitespace should be removed.
- *
- * @return {function(string): string} A function that takes a string as input and returns a string
- * with leading whitespace removed if it is the first invocation; otherwise, it returns the input unchanged.
- *
- * @example
- * const trimStart = trimStartOfStreamHelper();
- * const output1 = trimStart(" text"); // "text"
- * const output2 = trimStart(" text"); // " text"
- *
- */
- declare function trimStartOfStreamHelper(): (text: string) => string;
- /**
- * Returns a ReadableStream created from the response, parsed and handled with custom logic.
- * The stream goes through two transformation stages, first parsing the events and then
- * invoking the provided callbacks.
- *
- * For 2xx HTTP responses:
- * - The function continues with standard stream processing.
- *
- * For non-2xx HTTP responses:
- * - If the response body is defined, it asynchronously extracts and decodes the response body.
- * - It then creates a custom ReadableStream to propagate a detailed error message.
- *
- * @param {Response} response - The response.
- * @param {AIStreamParser} customParser - The custom parser function.
- * @param {AIStreamCallbacks} callbacks - The callbacks.
- * @return {ReadableStream} The AIStream.
- * @throws Will throw an error if the response is not OK.
- */
- declare function AIStream(response: Response, customParser?: AIStreamParser, callbacks?: AIStreamCallbacks): ReadableStream<Uint8Array>;
- /**
- * Implements ReadableStream.from(asyncIterable), which isn't documented in MDN and isn't implemented in node.
- * https://github.com/whatwg/streams/commit/8d7a0bf26eb2cc23e884ddbaac7c1da4b91cf2bc
- */
- declare function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>): ReadableStream<T>;
-
  /**
  * Shared types between the API and UI packages.
  */
@@ -253,11 +165,11 @@ type UseCompletionOptions = {
  */
  body?: object;
  };
-
  type JSONValue = null | string | number | boolean | {
  [x: string]: JSONValue;
  } | Array<JSONValue>;
- type OpenAIStreamCallbacks = AIStreamCallbacks & {
+
+ type OpenAIStreamCallbacks = AIStreamCallbacksAndOptions & {
  /**
  * @example
  * ```js
@@ -285,7 +197,7 @@ type OpenAIStreamCallbacks = AIStreamCallbacks & {
  * })
  * ```
  */
- experimental_onFunctionCall?: (functionCallPayload: FunctionCallPayload, createFunctionCallMessages: (functionCallResult: JSONValue) => CreateMessage[]) => Promise<Response | undefined | void | string>;
+ experimental_onFunctionCall?: (functionCallPayload: FunctionCallPayload, createFunctionCallMessages: (functionCallResult: JSONValue) => CreateMessage[]) => Promise<Response | undefined | void | string | AsyncIterable<ChatCompletionChunk>>;
  };
  interface ChatCompletionChunk {
  id: string;
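
The widened return type of `experimental_onFunctionCall` above means the handler can now hand back another streamed completion (an `AsyncIterable<ChatCompletionChunk>`) instead of only a `Response`, a string, or nothing. A minimal sketch of a route handler, assuming an `openai` client from the `openai` v4 SDK and a hypothetical `runFunction` helper that executes the requested function; neither is part of this diff:

```ts
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse, type JSONValue } from 'ai';

// Hypothetical application helper (an assumption, not part of the `ai` package).
declare function runFunction(name: string, args: Record<string, unknown>): Promise<JSONValue>;

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

export async function POST(req: Request) {
  const { messages } = await req.json();

  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo-0613',
    stream: true,
    messages,
    functions: [
      // function definitions omitted
    ],
  });

  const stream = OpenAIStream(response, {
    experimental_onFunctionCall: async ({ name, arguments: args }, createFunctionCallMessages) => {
      const result = await runFunction(name, args); // hypothetical helper
      // New in 2.2.2: returning another streamed completion
      // (an AsyncIterable<ChatCompletionChunk>) keeps the stream going.
      return openai.chat.completions.create({
        model: 'gpt-3.5-turbo-0613',
        stream: true,
        messages: [...messages, ...createFunctionCallMessages(result)],
      });
    },
  });

  return new StreamingTextResponse(stream);
}
```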
@@ -329,11 +241,134 @@ interface FunctionCall {
  }
  declare function OpenAIStream(res: Response | AsyncIterable<ChatCompletionChunk>, callbacks?: OpenAIStreamCallbacks): ReadableStream;
 
+ interface FunctionCallPayload {
+ name: string;
+ arguments: Record<string, unknown>;
+ }
+ /**
+ * Configuration options and helper callback methods for AIStream stream lifecycle events.
+ * @interface
+ */
+ interface AIStreamCallbacksAndOptions {
+ /** `onStart`: Called once when the stream is initialized. */
+ onStart?: () => Promise<void> | void;
+ /** `onCompletion`: Called for each tokenized message. */
+ onCompletion?: (completion: string) => Promise<void> | void;
+ /** `onFinal`: Called once when the stream is closed with the final completion message. */
+ onFinal?: (completion: string) => Promise<void> | void;
+ /** `onToken`: Called for each tokenized message. */
+ onToken?: (token: string) => Promise<void> | void;
+ /**
+ * A flag for enabling the experimental_StreamData class and the new protocol.
+ * @see https://github.com/vercel-labs/ai/pull/425
+ *
+ * When StreamData is rolled out, this will be removed and the new protocol will be used by default.
+ */
+ experimental_streamData?: boolean;
+ }
+ /**
+ * Custom parser for AIStream data.
+ * @interface
+ */
+ interface AIStreamParser {
+ (data: string): string | void;
+ }
+ /**
+ * Creates a TransformStream that parses events from an EventSource stream using a custom parser.
+ * @param {AIStreamParser} customParser - Function to handle event data.
+ * @returns {TransformStream<Uint8Array, string>} TransformStream parsing events.
+ */
+ declare function createEventStreamTransformer(customParser?: AIStreamParser): TransformStream<Uint8Array, string>;
+ /**
+ * Creates a transform stream that encodes input messages and invokes optional callback functions.
+ * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.
+ * - `onStart`: Called once when the stream is initialized.
+ * - `onToken`: Called for each tokenized message.
+ * - `onCompletion`: Called every time an AIStream completion message is received. This can occur multiple times when using e.g. OpenAI functions
+ * - `onFinal`: Called once when the stream is closed with the final completion message.
+ *
+ * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.
+ *
+ * @param {AIStreamCallbacksAndOptions} [callbacks] - An object containing the callback functions.
+ * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.
+ *
+ * @example
+ * const callbacks = {
+ * onStart: async () => console.log('Stream started'),
+ * onToken: async (token) => console.log(`Token: ${token}`),
+ * onCompletion: async (completion) => console.log(`Completion: ${completion}`)
+ * onFinal: async () => data.close()
+ * };
+ * const transformer = createCallbacksTransformer(callbacks);
+ */
+ declare function createCallbacksTransformer(cb: AIStreamCallbacksAndOptions | OpenAIStreamCallbacks | undefined): TransformStream<string, Uint8Array>;
+ /**
+ * Returns a stateful function that, when invoked, trims leading whitespace
+ * from the input text. The trimming only occurs on the first invocation, ensuring that
+ * subsequent calls do not alter the input text. This is particularly useful in scenarios
+ * where a text stream is being processed and only the initial whitespace should be removed.
+ *
+ * @return {function(string): string} A function that takes a string as input and returns a string
+ * with leading whitespace removed if it is the first invocation; otherwise, it returns the input unchanged.
+ *
+ * @example
+ * const trimStart = trimStartOfStreamHelper();
+ * const output1 = trimStart(" text"); // "text"
+ * const output2 = trimStart(" text"); // " text"
+ *
+ */
+ declare function trimStartOfStreamHelper(): (text: string) => string;
+ /**
+ * Returns a ReadableStream created from the response, parsed and handled with custom logic.
+ * The stream goes through two transformation stages, first parsing the events and then
+ * invoking the provided callbacks.
+ *
+ * For 2xx HTTP responses:
+ * - The function continues with standard stream processing.
+ *
+ * For non-2xx HTTP responses:
+ * - If the response body is defined, it asynchronously extracts and decodes the response body.
+ * - It then creates a custom ReadableStream to propagate a detailed error message.
+ *
+ * @param {Response} response - The response.
+ * @param {AIStreamParser} customParser - The custom parser function.
+ * @param {AIStreamCallbacksAndOptions} callbacks - The callbacks.
+ * @return {ReadableStream} The AIStream.
+ * @throws Will throw an error if the response is not OK.
+ */
+ declare function AIStream(response: Response, customParser?: AIStreamParser, callbacks?: AIStreamCallbacksAndOptions): ReadableStream<Uint8Array>;
+ /**
+ * Implements ReadableStream.from(asyncIterable), which isn't documented in MDN and isn't implemented in node.
+ * https://github.com/whatwg/streams/commit/8d7a0bf26eb2cc23e884ddbaac7c1da4b91cf2bc
+ */
+ declare function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>): ReadableStream<T>;
+
+ /**
+ * A stream wrapper to send custom JSON-encoded data back to the client.
+ */
+ declare class experimental_StreamData {
+ private encoder;
+ private controller;
+ stream: TransformStream<Uint8Array, Uint8Array>;
+ private isClosedPromise;
+ private isClosedPromiseResolver;
+ private isClosed;
+ private data;
+ constructor();
+ close(): Promise<void>;
+ append(value: JSONValue): void;
+ }
+ /**
+ * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. OpenAIStream has one for function call handling).
+ * This assumes every chunk is a 'text' chunk.
+ */
+ declare function createStreamDataTransformer(experimental_streamData: boolean | undefined): TransformStream<any, any>;
+
  /**
  * A utility class for streaming text responses.
  */
  declare class StreamingTextResponse extends Response {
- constructor(res: ReadableStream, init?: ResponseInit);
+ constructor(res: ReadableStream, init?: ResponseInit, data?: experimental_StreamData);
  }
  /**
  * A utility function to stream a ReadableStream to a Node.js response-like object.
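
Taken together, the new `experimental_streamData` / `onFinal` options, the `experimental_StreamData` class, and the third constructor argument on `StreamingTextResponse` let a route attach arbitrary JSON to the stream. A minimal sketch under the same assumption of an `openai` v4 SDK client; the appended payload is purely illustrative:

```ts
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse, experimental_StreamData } from 'ai';

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

export async function POST(req: Request) {
  const { messages } = await req.json();

  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages,
  });

  // Side channel for custom JSON that travels alongside the text stream.
  const data = new experimental_StreamData();
  data.append({ source: 'example' }); // illustrative payload

  const stream = OpenAIStream(response, {
    // Opt in to the prefixed stream protocol declared further down in this diff.
    experimental_streamData: true,
    // onFinal fires once when the stream closes; closing the data stream here
    // mirrors the `onFinal: async () => data.close()` example in the JSDoc above.
    onFinal: async () => {
      await data.close();
    },
  });

  // The optional third argument merges the data stream into the response.
  return new StreamingTextResponse(stream, {}, data);
}
```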
@@ -343,9 +378,9 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse,
  status?: number;
  }): void;
 
- declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacks): ReadableStream;
+ declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
 
- declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacks): ReadableStream;
+ declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
 
  interface CompletionChunk {
  /**
@@ -372,10 +407,10 @@ interface CompletionChunk {
  * or the return value of `await client.completions.create({ stream: true })`
  * from the `@anthropic-ai/sdk` package.
  */
- declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk>, cb?: AIStreamCallbacks): ReadableStream;
+ declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk>, cb?: AIStreamCallbacksAndOptions): ReadableStream;
 
- declare function LangChainStream(callbacks?: AIStreamCallbacks): {
- stream: ReadableStream<Uint8Array>;
+ declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
+ stream: ReadableStream<any>;
  handlers: {
  handleLLMNewToken: (token: string) => Promise<void>;
  handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
@@ -409,9 +444,49 @@ declare function LangChainStream(callbacks?: AIStreamCallbacks): {
  * return new StreamingTextResponse(stream)
  *
  */
- declare function ReplicateStream(res: Prediction, cb?: AIStreamCallbacks): Promise<ReadableStream>;
+ declare function ReplicateStream(res: Prediction, cb?: AIStreamCallbacksAndOptions): Promise<ReadableStream>;
 
  declare const nanoid: (size?: number | undefined) => string;
- declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
+ declare function createChunkDecoder(complex?: boolean): (chunk: Uint8Array | undefined) => any;
+
+ /**
+ * The map of prefixes for data in the stream
+ *
+ * - 0: Text from the LLM response
+ * - 1: (OpenAI) function_call responses
+ * - 2: custom JSON added by the user using `Data`
+ *
+ * Example:
+ * ```
+ * 0:Vercel
+ * 0:'s
+ * 0: AI
+ * 0: AI
+ * 0: SDK
+ * 0: is great
+ * 0:!
+ * 2: { "someJson": "value" }
+ * 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
+ *```
+ */
+ declare const StreamStringPrefixes: {
+ readonly text: 0;
+ readonly function_call: 1;
+ readonly data: 2;
+ };
+ declare const isStreamStringEqualToType: (type: keyof typeof StreamStringPrefixes, value: string) => value is `0:${string}\n` | `1:${string}\n` | `2:${string}\n`;
+ /**
+ * Prepends a string with a prefix from the `StreamChunkPrefixes`, JSON-ifies it, and appends a new line.
+ */
+ declare const getStreamString: (type: keyof typeof StreamStringPrefixes, value: JSONValue) => StreamString;
+ type StreamString = `${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\n`;
+ declare const getStreamStringTypeAndValue: (line: string) => {
+ type: keyof typeof StreamStringPrefixes;
+ value: string;
+ };
+ /**
+ * A header sent to the client so it knows how to handle parsing the stream (as a deprecated text response or using the new prefixed protocol)
+ */
+ declare const COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
- export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReplicateStream, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AnthropicStream, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReplicateStream, RequestOptions, StreamString, StreamStringPrefixes, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_StreamData, getStreamString, getStreamStringTypeAndValue, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
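
The prefixed protocol added in this release is exposed through `getStreamString`, `getStreamStringTypeAndValue`, `isStreamStringEqualToType`, and `COMPLEX_HEADER`, with `createChunkDecoder(true)` as the complex-mode decoder on the client side. A small sketch of the encoding helpers, based only on the JSDoc above; the exact runtime output is not shown in this diff, so the expected values in the comments are assumptions:

```ts
import {
  COMPLEX_HEADER,
  getStreamString,
  getStreamStringTypeAndValue,
  isStreamStringEqualToType,
} from 'ai';

// Per the JSDoc: prepend the numeric prefix, JSON-ify the value, append a newline.
const textLine = getStreamString('text', 'Hello'); // expected: '0:"Hello"\n'
const dataLine = getStreamString('data', { someJson: 'value' }); // expected: '2:{"someJson":"value"}\n'

// Type-guard check that a stream string carries the expected prefix (assumed behaviour).
console.log(isStreamStringEqualToType('text', textLine)); // expected: true

// Split a line back into its prefix type and raw value (assumed round-trip behaviour).
const { type, value } = getStreamStringTypeAndValue(dataLine);
console.log(type, value); // expected: 'data' and the JSON-encoded payload

// Header the server sends so the client knows to parse the prefixed protocol.
console.log(COMPLEX_HEADER); // "X-Experimental-Stream-Data"
```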