ai 5.0.0-canary.22 → 5.0.0-canary.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/dist/index.d.mts +688 -293
- package/dist/index.d.ts +688 -293
- package/dist/index.js +997 -513
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +949 -463
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
package/dist/index.d.mts
CHANGED
@@ -1,4 +1,4 @@
-import { ToolResultContent, Schema, ToolCall, ToolResult,
+import { ToolResultContent, Schema, ToolCall, ToolResult, FetchFunction, IdGenerator } from '@ai-sdk/provider-utils';
 export { IdGenerator, Schema, ToolCall, ToolResult, asSchema, createIdGenerator, generateId, jsonSchema } from '@ai-sdk/provider-utils';
 import { AISDKError, SharedV2ProviderMetadata, SharedV2ProviderOptions, EmbeddingModelV2, EmbeddingModelV2Embedding, ImageModelV2, ImageModelV2CallWarning, ImageModelV2ProviderMetadata, JSONValue as JSONValue$1, LanguageModelV2, LanguageModelV2FinishReason, LanguageModelV2CallWarning, LanguageModelV2Source, SpeechModelV1, SpeechModelV1CallWarning, TranscriptionModelV1, TranscriptionModelV1CallWarning, LanguageModelV2Usage, JSONObject, LanguageModelV2ToolCall, JSONSchema7, LanguageModelV2CallOptions, JSONParseError, TypeValidationError, LanguageModelV2Middleware, ProviderV2, NoSuchModelError } from '@ai-sdk/provider';
 export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, JSONSchema7, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
@@ -2287,273 +2287,6 @@ declare function appendClientMessage({ messages, message, }: {
 message: UIMessage;
 }): UIMessage<unknown>[];

-type ChatRequestOptions = {
-/**
-Additional headers that should be to be passed to the API endpoint.
-*/
-headers?: Record<string, string> | Headers;
-/**
-Additional body JSON properties that should be sent to the API endpoint.
-*/
-body?: object;
-};
-type UseChatOptions<MESSAGE_METADATA = unknown> = {
-/**
-* Schema for the message metadata. Validates the message metadata.
-* Message metadata can be undefined or must match the schema.
-*/
-messageMetadataSchema?: Schema<MESSAGE_METADATA>;
-/**
-* The API endpoint that accepts a `{ messages: Message[] }` object and returns
-* a stream of tokens of the AI chat response. Defaults to `/api/chat`.
-*/
-api?: string;
-/**
-* A unique identifier for the chat. If not provided, a random one will be
-* generated. When provided, the `useChat` hook with the same `id` will
-* have shared states across components.
-*/
-id?: string;
-/**
-* Initial messages of the chat. Useful to load an existing chat history.
-*/
-initialMessages?: UIMessage<NoInfer<MESSAGE_METADATA>>[];
-/**
-* Initial input of the chat.
-*/
-initialInput?: string;
-/**
-Optional callback function that is invoked when a tool call is received.
-Intended for automatic client-side tool execution.
-
-You can optionally return a result for the tool call,
-either synchronously or asynchronously.
-*/
-onToolCall?: ({ toolCall, }: {
-toolCall: ToolCall<string, unknown>;
-}) => void | Promise<unknown> | unknown;
-/**
-* Optional callback function that is called when the assistant message is finished streaming.
-*
-* @param message The message that was streamed.
-*/
-onFinish?: (options: {
-message: UIMessage<NoInfer<MESSAGE_METADATA>>;
-}) => void;
-/**
-* Callback function to be called when an error is encountered.
-*/
-onError?: (error: Error) => void;
-/**
-* A way to provide a function that is going to be used for ids for messages and the chat.
-* If not provided the default AI SDK `generateId` is used.
-*/
-generateId?: IdGenerator;
-/**
-* The credentials mode to be used for the fetch request.
-* Possible values are: 'omit', 'same-origin', 'include'.
-* Defaults to 'same-origin'.
-*/
-credentials?: RequestCredentials;
-/**
-* HTTP headers to be sent with the API request.
-*/
-headers?: Record<string, string> | Headers;
-/**
-* Extra body object to be sent with the API request.
-* @example
-* Send a `sessionId` to the API along with the messages.
-* ```js
-* useChat({
-* body: {
-* sessionId: '123',
-* }
-* })
-* ```
-*/
-body?: object;
-/**
-Streaming protocol that is used. Defaults to `ui-message`.
-*/
-streamProtocol?: 'ui-message' | 'text';
-/**
-Custom fetch implementation. You can use it as a middleware to intercept requests,
-or to provide a custom fetch implementation for e.g. testing.
-*/
-fetch?: FetchFunction;
-/**
-Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.
-Must be at least 1.
-
-A maximum number is required to prevent infinite loops in the case of misconfigured tools.
-
-By default, it's set to 1, which means that only a single LLM call is made.
-*/
-maxSteps?: number;
-};
-
-declare const getOriginalFetch$1: () => typeof fetch;
-declare function callChatApi<MESSAGE_METADATA>({ api, body, streamProtocol, credentials, headers, abortController, onUpdate, onFinish, onToolCall, generateId, fetch, lastMessage, requestType, messageMetadataSchema, }: {
-api: string;
-body: Record<string, any>;
-streamProtocol: 'ui-message' | 'text' | undefined;
-credentials: RequestCredentials | undefined;
-headers: HeadersInit | undefined;
-abortController: (() => AbortController | null) | undefined;
-onUpdate: (options: {
-message: UIMessage<MESSAGE_METADATA>;
-}) => void;
-onFinish: UseChatOptions<MESSAGE_METADATA>['onFinish'];
-onToolCall: UseChatOptions<MESSAGE_METADATA>['onToolCall'];
-generateId: IdGenerator;
-fetch: ReturnType<typeof getOriginalFetch$1> | undefined;
-lastMessage: UIMessage<MESSAGE_METADATA> | undefined;
-requestType?: 'generate' | 'resume';
-messageMetadataSchema?: Schema<MESSAGE_METADATA>;
-}): Promise<void>;
-
-declare const getOriginalFetch: () => typeof fetch;
-declare function callCompletionApi({ api, prompt, credentials, headers, body, streamProtocol, setCompletion, setLoading, setError, setAbortController, onFinish, onError, fetch, }: {
-api: string;
-prompt: string;
-credentials: RequestCredentials | undefined;
-headers: HeadersInit | undefined;
-body: Record<string, any>;
-streamProtocol: 'data' | 'text' | undefined;
-setCompletion: (completion: string) => void;
-setLoading: (loading: boolean) => void;
-setError: (error: Error | undefined) => void;
-setAbortController: (abortController: AbortController | null) => void;
-onFinish: ((prompt: string, completion: string) => void) | undefined;
-onError: ((error: Error) => void) | undefined;
-fetch: ReturnType<typeof getOriginalFetch> | undefined;
-}): Promise<string | null | undefined>;
-
-declare function convertFileListToFileUIParts(files: FileList | undefined): Promise<Array<FileUIPart>>;
-
-/**
-Converts an array of messages from useChat into an array of CoreMessages that can be used
-with the AI core functions (e.g. `streamText`).
-*/
-declare function convertToModelMessages<TOOLS extends ToolSet = never>(messages: Array<Omit<UIMessage, 'id'>>, options?: {
-tools?: TOOLS;
-}): ModelMessage[];
-/**
-@deprecated Use `convertToModelMessages` instead.
-*/
-declare const convertToCoreMessages: typeof convertToModelMessages;
-
-declare function extractMaxToolInvocationStep(toolInvocations: ToolInvocation[] | undefined): number | undefined;
-
-declare function getToolInvocations(message: UIMessage): ToolInvocation[];
-
-declare function processTextStream({ stream, onTextPart, }: {
-stream: ReadableStream<Uint8Array>;
-onTextPart: (chunk: string) => Promise<void> | void;
-}): Promise<void>;
-
-declare function shouldResubmitMessages({ originalMaxToolInvocationStep, originalMessageCount, maxSteps, messages, }: {
-originalMaxToolInvocationStep: number | undefined;
-originalMessageCount: number;
-maxSteps: number;
-messages: UIMessage[];
-}): boolean;
-/**
-Check if the message is an assistant message with completed tool calls.
-The last step of the message must have at least one tool invocation and
-all tool invocations must have a result.
-*/
-declare function isAssistantMessageWithCompletedToolCalls(message: UIMessage): message is UIMessage & {
-role: 'assistant';
-};
-
-/**
-* Updates the result of a specific tool invocation in the last message of the given messages array.
-*
-* @param {object} params - The parameters object.
-* @param {UIMessage[]} params.messages - An array of messages, from which the last one is updated.
-* @param {string} params.toolCallId - The unique identifier for the tool invocation to update.
-* @param {unknown} params.toolResult - The result object to attach to the tool invocation.
-* @returns {void} This function does not return anything.
-*/
-declare function updateToolCallResult({ messages, toolCallId, toolResult: result, }: {
-messages: UIMessage[];
-toolCallId: string;
-toolResult: unknown;
-}): void;
-
-type CompletionRequestOptions = {
-/**
-An optional object of headers to be passed to the API endpoint.
-*/
-headers?: Record<string, string> | Headers;
-/**
-An optional object to be passed to the API endpoint.
-*/
-body?: object;
-};
-type UseCompletionOptions = {
-/**
-* The API endpoint that accepts a `{ prompt: string }` object and returns
-* a stream of tokens of the AI completion response. Defaults to `/api/completion`.
-*/
-api?: string;
-/**
-* An unique identifier for the chat. If not provided, a random one will be
-* generated. When provided, the `useChat` hook with the same `id` will
-* have shared states across components.
-*/
-id?: string;
-/**
-* Initial prompt input of the completion.
-*/
-initialInput?: string;
-/**
-* Initial completion result. Useful to load an existing history.
-*/
-initialCompletion?: string;
-/**
-* Callback function to be called when the completion is finished streaming.
-*/
-onFinish?: (prompt: string, completion: string) => void;
-/**
-* Callback function to be called when an error is encountered.
-*/
-onError?: (error: Error) => void;
-/**
-* The credentials mode to be used for the fetch request.
-* Possible values are: 'omit', 'same-origin', 'include'.
-* Defaults to 'same-origin'.
-*/
-credentials?: RequestCredentials;
-/**
-* HTTP headers to be sent with the API request.
-*/
-headers?: Record<string, string> | Headers;
-/**
-* Extra body object to be sent with the API request.
-* @example
-* Send a `sessionId` to the API along with the prompt.
-* ```js
-* useChat({
-* body: {
-* sessionId: '123',
-* }
-* })
-* ```
-*/
-body?: object;
-/**
-Streaming protocol that is used. Defaults to `data`.
-*/
-streamProtocol?: 'data' | 'text';
-/**
-Custom fetch implementation. You can use it as a middleware to intercept requests,
-or to provide a custom fetch implementation for e.g. testing.
-*/
-fetch?: FetchFunction;
-};
-
 declare const uiMessageStreamPartSchema: z.ZodDiscriminatedUnion<"type", [z.ZodObject<{
 type: z.ZodLiteral<"text">;
 value: z.ZodString;
@@ -2882,40 +2615,702 @@ declare const uiMessageStreamPartSchema: z.ZodDiscriminatedUnion<"type", [z.ZodO
 }>]>;
 type UIMessageStreamPart = z.infer<typeof uiMessageStreamPartSchema>;

-
+type Job = () => Promise<void>;
+
+declare class SerialJobExecutor {
+private queue;
+private isProcessing;
+private processQueue;
+run(job: Job): Promise<void>;
+}
+
+interface UIMessageStreamWriter {
+/**
+* Appends a data stream part to the stream.
+*/
+write(part: UIMessageStreamPart): void;
+/**
+* Merges the contents of another stream to this stream.
+*/
+merge(stream: ReadableStream<UIMessageStreamPart>): void;
+/**
+* Error handler that is used by the data stream writer.
+* This is intended for forwarding when merging streams
+* to prevent duplicated error masking.
+*/
+onError: ((error: unknown) => string) | undefined;
+}
+
+declare function createUIMessageStream({ execute, onError, }: {
+execute: (writer: UIMessageStreamWriter) => Promise<void> | void;
+onError?: (error: unknown) => string;
+}): ReadableStream<UIMessageStreamPart>;
+
+declare function createUIMessageStreamResponse({ status, statusText, headers, stream, }: ResponseInit & {
+stream: ReadableStream<UIMessageStreamPart>;
+}): Response;
+
+declare function pipeUIMessageStreamToResponse({ response, status, statusText, headers, stream, }: {
+response: ServerResponse;
+stream: ReadableStream<UIMessageStreamPart>;
+} & ResponseInit): void;
+
+declare class JsonToSseTransformStream extends TransformStream<unknown, string> {
+constructor();
+}
+
+interface ChatTransport<MESSAGE_METADATA> {
+submitMessages: (options: {
+chatId: string;
+messages: UIMessage<MESSAGE_METADATA>[];
+abortController: AbortController;
+body?: object;
+headers?: Record<string, string> | Headers;
+requestType: 'generate' | 'resume';
+}) => Promise<ReadableStream<UIMessageStreamPart>>;
+}
+declare class DefaultChatTransport<MESSAGE_METADATA> implements ChatTransport<MESSAGE_METADATA> {
+private api;
+private credentials?;
+private headers?;
+private body?;
+private streamProtocol?;
+private fetch?;
+private prepareRequestBody?;
+constructor({ api, credentials, headers, body, streamProtocol, fetch, prepareRequestBody, }: {
+api: string;
+/**
+* The credentials mode to be used for the fetch request.
+* Possible values are: 'omit', 'same-origin', 'include'.
+* Defaults to 'same-origin'.
+*/
+credentials?: RequestCredentials;
+/**
+* HTTP headers to be sent with the API request.
+*/
+headers?: Record<string, string> | Headers;
+/**
+* Extra body object to be sent with the API request.
+* @example
+* Send a `sessionId` to the API along with the messages.
+* ```js
+* useChat({
+* body: {
+* sessionId: '123',
+* }
+* })
+* ```
+*/
+body?: object;
+/**
+Streaming protocol that is used. Defaults to `ui-message`.
+*/
+streamProtocol?: 'ui-message' | 'text';
+/**
+Custom fetch implementation. You can use it as a middleware to intercept requests,
+or to provide a custom fetch implementation for e.g. testing.
+*/
+fetch?: FetchFunction;
+/**
+* When a function is provided, it will be used
+* to prepare the request body for the chat API. This can be useful for
+* customizing the request body based on the messages and data in the chat.
+*
+* @param id The id of the chat.
+* @param messages The current messages in the chat.
+* @param requestBody The request body object passed in the chat request.
+*/
+prepareRequestBody?: (options: {
+id: string;
+messages: UIMessage<MESSAGE_METADATA>[];
+requestBody?: object;
+}) => unknown;
+});
+submitMessages({ chatId, messages, abortController, body, headers, requestType, }: Parameters<ChatTransport<MESSAGE_METADATA>['submitMessages']>[0]): Promise<ReadableStream<{
+value: string;
+type: "text";
+} | {
+value: string;
+type: "error";
+} | {
+value: {
+toolName: string;
+toolCallId: string;
+args?: unknown;
+};
+type: "tool-call";
+} | {
+value: {
+toolCallId: string;
+result?: unknown;
+providerMetadata?: any;
+};
+type: "tool-result";
+} | {
+value: {
+toolName: string;
+toolCallId: string;
+};
+type: "tool-call-streaming-start";
+} | {
+value: {
+toolCallId: string;
+argsTextDelta: string;
+};
+type: "tool-call-delta";
+} | {
+value: {
+text: string;
+providerMetadata?: Record<string, any> | undefined;
+};
+type: "reasoning";
+} | {
+value: {
+type: "source";
+id: string;
+url: string;
+sourceType: "url";
+providerMetadata?: any;
+title?: string | undefined;
+};
+type: "source";
+} | {
+value: {
+mediaType: string;
+url: string;
+};
+type: "file";
+} | {
+value: {
+metadata?: unknown;
+};
+type: "metadata";
+} | {
+value: {
+metadata?: unknown;
+};
+type: "start-step";
+} | {
+value: {
+metadata?: unknown;
+};
+type: "finish-step";
+} | {
+value: {
+metadata?: unknown;
+messageId?: string | undefined;
+};
+type: "start";
+} | {
+value: {
+metadata?: unknown;
+};
+type: "finish";
+} | {
+value: null;
+type: "reasoning-part-finish";
+}>>;
+}
+
+type StreamingUIMessageState<MESSAGE_METADATA = unknown> = {
+message: UIMessage<MESSAGE_METADATA>;
+activeTextPart: TextUIPart | undefined;
+activeReasoningPart: ReasoningUIPart | undefined;
+partialToolCalls: Record<string, {
+text: string;
+step: number;
+index: number;
+toolName: string;
+}>;
+step: number;
+};
+
+interface ChatStoreSubscriber {
+onChatChanged: (event: ChatStoreEvent) => void;
+}
+interface ChatStoreEvent {
+type: 'chat-messages-changed' | 'chat-status-changed';
+chatId: number | string;
+error?: Error;
+}
+type ChatStatus = 'submitted' | 'streaming' | 'ready' | 'error';
+interface Chat<MESSAGE_METADATA> {
+status: ChatStatus;
+messages: UIMessage<MESSAGE_METADATA>[];
+error?: Error;
+activeResponse?: {
+state: StreamingUIMessageState<MESSAGE_METADATA>;
+abortController?: AbortController;
+};
+jobExecutor: SerialJobExecutor;
+}
+type ExtendedCallOptions<MESSAGE_METADATA> = ChatRequestOptions & {
+onError?: (error: Error) => void;
+/**
+Optional callback function that is invoked when a tool call is received.
+Intended for automatic client-side tool execution.
+
+You can optionally return a result for the tool call,
+either synchronously or asynchronously.
+*/
+onToolCall?: ({ toolCall, }: {
+toolCall: ToolCall<string, unknown>;
+}) => void | Promise<unknown> | unknown;
+/**
+* Optional callback function that is called when the assistant message is finished streaming.
+*
+* @param message The message that was streamed.
+*/
+onFinish?: (options: {
+message: UIMessage<NoInfer<MESSAGE_METADATA>>;
+}) => void;
+};
+declare class ChatStore<MESSAGE_METADATA> {
+private chats;
+private subscribers;
+private generateId;
+private messageMetadataSchema;
+private transport;
+private maxSteps;
+constructor({ chats, generateId, messageMetadataSchema, transport, maxSteps, }: {
+chats?: {
+[id: string]: {
+messages: UIMessage<MESSAGE_METADATA>[];
+};
+};
+generateId?: UseChatOptions['generateId'];
+messageMetadataSchema?: Schema<MESSAGE_METADATA>;
+transport: ChatTransport<MESSAGE_METADATA>;
+maxSteps?: number;
+});
+hasChat(id: string): boolean;
+addChat(id: string, messages: UIMessage<MESSAGE_METADATA>[]): void;
+getChats(): [string, Chat<MESSAGE_METADATA>][];
+get chatCount(): number;
+getStatus(id: string): ChatStatus;
+setStatus({ id, status, error, }: {
+id: string;
+status: Chat<MESSAGE_METADATA>['status'];
+error?: Error;
+}): void;
+getError(id: string): Error | undefined;
+getMessages(id: string): UIMessage<MESSAGE_METADATA>[];
+getLastMessage(id: string): UIMessage<MESSAGE_METADATA>;
+subscribe(subscriber: ChatStoreSubscriber): () => void;
+setMessages({ id, messages, }: {
+id: string;
+messages: UIMessage<MESSAGE_METADATA>[];
+}): void;
+appendMessage({ id, message, }: {
+id: string;
+message: UIMessage<MESSAGE_METADATA>;
+}): void;
+removeAssistantResponse(id: string): void;
+submitMessage({ chatId, message, headers, body, onError, onToolCall, onFinish, }: ExtendedCallOptions<MESSAGE_METADATA> & {
+chatId: string;
+message: CreateUIMessage<MESSAGE_METADATA>;
+}): Promise<void>;
+resubmitLastUserMessage({ chatId, headers, body, onError, onToolCall, onFinish, }: ExtendedCallOptions<MESSAGE_METADATA> & {
+chatId: string;
+}): Promise<null | undefined>;
+resumeStream({ chatId, headers, body, onError, onToolCall, onFinish, }: ExtendedCallOptions<MESSAGE_METADATA> & {
+chatId: string;
+}): Promise<null | undefined>;
+addToolResult({ chatId, toolCallId, result, }: {
+chatId: string;
+toolCallId: string;
+result: unknown;
+}): Promise<void>;
+stopStream({ chatId }: {
+chatId: string;
+}): Promise<void>;
+private emit;
+private getChat;
+private triggerRequest;
+}
+
+type ChatRequestOptions = {
+/**
+Additional headers that should be to be passed to the API endpoint.
+*/
+headers?: Record<string, string> | Headers;
+/**
+Additional body JSON properties that should be sent to the API endpoint.
+*/
+body?: object;
+};
+type UseChatOptions<MESSAGE_METADATA = unknown> = {
+/**
+* A unique identifier for the chat. If not provided, a random one will be
+* generated. When provided, the `useChat` hook with the same `id` will
+* have shared states across components.
+*/
+id?: string;
+/**
+* Initial input of the chat.
+*/
+initialInput?: string;
+/**
+Optional callback function that is invoked when a tool call is received.
+Intended for automatic client-side tool execution.
+
+You can optionally return a result for the tool call,
+either synchronously or asynchronously.
+*/
+onToolCall?: ({ toolCall, }: {
+toolCall: ToolCall<string, unknown>;
+}) => void | Promise<unknown> | unknown;
+/**
+* Optional callback function that is called when the assistant message is finished streaming.
+*
+* @param message The message that was streamed.
+*/
+onFinish?: (options: {
+message: UIMessage<NoInfer<MESSAGE_METADATA>>;
+}) => void;
+/**
+* Callback function to be called when an error is encountered.
+*/
+onError?: (error: Error) => void;
+/**
+* A way to provide a function that is going to be used for ids for messages and the chat.
+* If not provided the default AI SDK `generateId` is used.
+*/
+generateId?: IdGenerator;
+/**
+* Optional chat store. Default is used when not provided.
+*/
+chatStore?: ChatStore<MESSAGE_METADATA>;
+};
+type OriginalUseChatOptions<MESSAGE_METADATA = unknown> = {
+/**
+* Schema for the message metadata. Validates the message metadata.
+* Message metadata can be undefined or must match the schema.
+*/
+messageMetadataSchema?: Schema<MESSAGE_METADATA>;
+/**
+* The API endpoint that accepts a `{ messages: Message[] }` object and returns
+* a stream of tokens of the AI chat response. Defaults to `/api/chat`.
+*/
+api?: string;
+/**
+* A unique identifier for the chat. If not provided, a random one will be
+* generated. When provided, the `useChat` hook with the same `id` will
+* have shared states across components.
+*/
+id?: string;
+/**
+* Initial messages of the chat. Useful to load an existing chat history.
+*/
+initialMessages?: UIMessage<NoInfer<MESSAGE_METADATA>>[];
+/**
+* Initial input of the chat.
+*/
+initialInput?: string;
+/**
+Optional callback function that is invoked when a tool call is received.
+Intended for automatic client-side tool execution.
+
+You can optionally return a result for the tool call,
+either synchronously or asynchronously.
+*/
+onToolCall?: ({ toolCall, }: {
+toolCall: ToolCall<string, unknown>;
+}) => void | Promise<unknown> | unknown;
+/**
+* Optional callback function that is called when the assistant message is finished streaming.
+*
+* @param message The message that was streamed.
+*/
+onFinish?: (options: {
+message: UIMessage<NoInfer<MESSAGE_METADATA>>;
+}) => void;
+/**
+* Callback function to be called when an error is encountered.
+*/
+onError?: (error: Error) => void;
+/**
+* A way to provide a function that is going to be used for ids for messages and the chat.
+* If not provided the default AI SDK `generateId` is used.
+*/
+generateId?: IdGenerator;
+/**
+* The credentials mode to be used for the fetch request.
+* Possible values are: 'omit', 'same-origin', 'include'.
+* Defaults to 'same-origin'.
+*/
+credentials?: RequestCredentials;
+/**
+* HTTP headers to be sent with the API request.
+*/
+headers?: Record<string, string> | Headers;
+/**
+* Extra body object to be sent with the API request.
+* @example
+* Send a `sessionId` to the API along with the messages.
+* ```js
+* useChat({
+* body: {
+* sessionId: '123',
+* }
+* })
+* ```
+*/
+body?: object;
+/**
+Streaming protocol that is used. Defaults to `ui-message`.
+*/
+streamProtocol?: 'ui-message' | 'text';
+/**
+Custom fetch implementation. You can use it as a middleware to intercept requests,
+or to provide a custom fetch implementation for e.g. testing.
+*/
+fetch?: FetchFunction;
+/**
+Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.
+Must be at least 1.
+
+A maximum number is required to prevent infinite loops in the case of misconfigured tools.
+
+By default, it's set to 1, which means that only a single LLM call is made.
+*/
+maxSteps?: number;
+};
+
+declare const getOriginalFetch$1: () => typeof fetch;
+declare function callChatApi<MESSAGE_METADATA>({ api, body, streamProtocol, credentials, headers, abortController, onUpdate, onFinish, onToolCall, generateId, fetch, lastMessage, requestType, messageMetadataSchema, }: {
+api: string;
+body: Record<string, any>;
+streamProtocol: 'ui-message' | 'text' | undefined;
+credentials: RequestCredentials | undefined;
+headers: HeadersInit | undefined;
+abortController: (() => AbortController | null) | undefined;
+onUpdate: (options: {
+message: UIMessage<MESSAGE_METADATA>;
+}) => void;
+onFinish: UseChatOptions<MESSAGE_METADATA>['onFinish'];
+onToolCall: UseChatOptions<MESSAGE_METADATA>['onToolCall'];
+generateId: IdGenerator;
+fetch: ReturnType<typeof getOriginalFetch$1> | undefined;
+lastMessage: UIMessage<MESSAGE_METADATA> | undefined;
+requestType?: 'generate' | 'resume';
+messageMetadataSchema?: Schema<MESSAGE_METADATA>;
+}): Promise<void>;
+
+declare const getOriginalFetch: () => typeof fetch;
+declare function callCompletionApi({ api, prompt, credentials, headers, body, streamProtocol, setCompletion, setLoading, setError, setAbortController, onFinish, onError, fetch, }: {
+api: string;
+prompt: string;
+credentials: RequestCredentials | undefined;
+headers: HeadersInit | undefined;
+body: Record<string, any>;
+streamProtocol: 'data' | 'text' | undefined;
+setCompletion: (completion: string) => void;
+setLoading: (loading: boolean) => void;
+setError: (error: Error | undefined) => void;
+setAbortController: (abortController: AbortController | null) => void;
+onFinish: ((prompt: string, completion: string) => void) | undefined;
+onError: ((error: Error) => void) | undefined;
+fetch: ReturnType<typeof getOriginalFetch> | undefined;
+}): Promise<string | null | undefined>;
+
+declare function convertFileListToFileUIParts(files: FileList | undefined): Promise<Array<FileUIPart>>;
+
+/**
+Converts an array of messages from useChat into an array of CoreMessages that can be used
+with the AI core functions (e.g. `streamText`).
+*/
+declare function convertToModelMessages<TOOLS extends ToolSet = never>(messages: Array<Omit<UIMessage, 'id'>>, options?: {
+tools?: TOOLS;
+}): ModelMessage[];
+/**
+@deprecated Use `convertToModelMessages` instead.
+*/
+declare const convertToCoreMessages: typeof convertToModelMessages;
+
+declare function defaultChatStore<MESSAGE_METADATA>({ api, fetch, streamProtocol, credentials, headers, body, prepareRequestBody, generateId, messageMetadataSchema, maxSteps, chats, }: {
 /**
-*
+* Schema for the message metadata. Validates the message metadata.
+* Message metadata can be undefined or must match the schema.
 */
-
+messageMetadataSchema?: Schema<MESSAGE_METADATA>;
 /**
-*
+* The API endpoint that accepts a `{ messages: Message[] }` object and returns
+* a stream of tokens of the AI chat response.
 */
-
+api: string;
 /**
-*
-*
-* to prevent duplicated error masking.
+* A way to provide a function that is going to be used for ids for messages and the chat.
+* If not provided the default AI SDK `generateId` is used.
 */
-
-
+generateId?: IdGenerator;
+/**
+* The credentials mode to be used for the fetch request.
+* Possible values are: 'omit', 'same-origin', 'include'.
+* Defaults to 'same-origin'.
+*/
+credentials?: RequestCredentials;
+/**
+* HTTP headers to be sent with the API request.
+*/
+headers?: Record<string, string> | Headers;
+/**
+* Extra body object to be sent with the API request.
+* @example
+* Send a `sessionId` to the API along with the messages.
+* ```js
+* useChat({
+* body: {
+* sessionId: '123',
+* }
+* })
+* ```
+*/
+body?: object;
+/**
+Streaming protocol that is used. Defaults to `ui-message`.
+*/
+streamProtocol?: 'ui-message' | 'text';
+/**
+Custom fetch implementation. You can use it as a middleware to intercept requests,
+or to provide a custom fetch implementation for e.g. testing.
+*/
+fetch?: FetchFunction;
+/**
+Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.
+Must be at least 1.
+
+A maximum number is required to prevent infinite loops in the case of misconfigured tools.
+
+By default, it's set to 1, which means that only a single LLM call is made.
+*/
+maxSteps?: number;
+/**
+* When a function is provided, it will be used
+* to prepare the request body for the chat API. This can be useful for
+* customizing the request body based on the messages and data in the chat.
+*
+* @param id The id of the chat.
+* @param messages The current messages in the chat.
+* @param requestBody The request body object passed in the chat request.
+*/
+prepareRequestBody?: (options: {
+id: string;
+messages: UIMessage<MESSAGE_METADATA>[];
+requestBody?: object;
+}) => unknown;
+chats?: {
+[id: string]: {
+messages: UIMessage<MESSAGE_METADATA>[];
+};
+};
+}): ChatStore<MESSAGE_METADATA>;

-declare function
-execute: (writer: UIMessageStreamWriter) => Promise<void> | void;
-onError?: (error: unknown) => string;
-}): ReadableStream<UIMessageStreamPart>;
+declare function extractMaxToolInvocationStep(toolInvocations: ToolInvocation[] | undefined): number | undefined;

-declare function
-stream: ReadableStream<UIMessageStreamPart>;
-}): Response;
+declare function getToolInvocations(message: UIMessage): ToolInvocation[];

-declare function
-
-
-
+declare function shouldResubmitMessages({ originalMaxToolInvocationStep, originalMessageCount, maxSteps, messages, }: {
+originalMaxToolInvocationStep: number | undefined;
+originalMessageCount: number;
+maxSteps: number;
+messages: UIMessage[];
+}): boolean;
+/**
+Check if the message is an assistant message with completed tool calls.
+The last step of the message must have at least one tool invocation and
+all tool invocations must have a result.
+*/
+declare function isAssistantMessageWithCompletedToolCalls(message: UIMessage): message is UIMessage & {
+role: 'assistant';
+};

-
-
-
+/**
+* Updates the result of a specific tool invocation in the last message of the given messages array.
+*
+* @param {object} params - The parameters object.
+* @param {UIMessage[]} params.messages - An array of messages, from which the last one is updated.
+* @param {string} params.toolCallId - The unique identifier for the tool invocation to update.
+* @param {unknown} params.toolResult - The result object to attach to the tool invocation.
+* @returns {void} This function does not return anything.
+*/
+declare function updateToolCallResult({ messages, toolCallId, toolResult: result, }: {
+messages: UIMessage[];
+toolCallId: string;
+toolResult: unknown;
+}): void;
+
+type CompletionRequestOptions = {
+/**
+An optional object of headers to be passed to the API endpoint.
+*/
+headers?: Record<string, string> | Headers;
+/**
+An optional object to be passed to the API endpoint.
+*/
+body?: object;
+};
+type UseCompletionOptions = {
+/**
+* The API endpoint that accepts a `{ prompt: string }` object and returns
+* a stream of tokens of the AI completion response. Defaults to `/api/completion`.
+*/
+api?: string;
+/**
+* An unique identifier for the chat. If not provided, a random one will be
+* generated. When provided, the `useChat` hook with the same `id` will
+* have shared states across components.
+*/
+id?: string;
+/**
+* Initial prompt input of the completion.
+*/
+initialInput?: string;
+/**
+* Initial completion result. Useful to load an existing history.
+*/
+initialCompletion?: string;
+/**
+* Callback function to be called when the completion is finished streaming.
+*/
+onFinish?: (prompt: string, completion: string) => void;
+/**
+* Callback function to be called when an error is encountered.
+*/
+onError?: (error: Error) => void;
+/**
+* The credentials mode to be used for the fetch request.
+* Possible values are: 'omit', 'same-origin', 'include'.
+* Defaults to 'same-origin'.
+*/
+credentials?: RequestCredentials;
+/**
+* HTTP headers to be sent with the API request.
+*/
+headers?: Record<string, string> | Headers;
+/**
+* Extra body object to be sent with the API request.
+* @example
+* Send a `sessionId` to the API along with the prompt.
+* ```js
+* useChat({
+* body: {
+* sessionId: '123',
+* }
+* })
+* ```
+*/
+body?: object;
+/**
+Streaming protocol that is used. Defaults to `data`.
+*/
+streamProtocol?: 'data' | 'text';
+/**
+Custom fetch implementation. You can use it as a middleware to intercept requests,
+or to provide a custom fetch implementation for e.g. testing.
+*/
+fetch?: FetchFunction;
+};

 /**
 * Converts a data URL of type text/* to a text string.
@@ -4894,4 +5289,4 @@ declare function transcribe({ model, audio, providerOptions, maxRetries: maxRetr
 headers?: Record<string, string>;
 }): Promise<TranscriptionResult>;

-export { AssistantContent, AssistantModelMessage, CallSettings, CallWarning, ChatRequestOptions, ChunkDetector, CompletionRequestOptions, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreToolMessage, CoreUserMessage, CreateUIMessage, DataContent, DeepPartial, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedFile as Experimental_GeneratedImage, SpeechResult as Experimental_SpeechResult, TranscriptionResult as Experimental_TranscriptionResult, FilePart, FileUIPart, FinishReason, GenerateObjectResult, GenerateTextOnStepFinishCallback, GenerateTextResult, GeneratedAudioFile, GeneratedFile, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImageModelProviderMetadata, ImageModelResponseMetadata, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidStreamPartError, InvalidToolArgumentsError, JSONRPCError, JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, JSONValue, JsonToSseTransformStream, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, MCPClientError, MCPTransport, MessageConversionError, ModelMessage, NoImageGeneratedError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Prompt, Provider, ProviderMetadata, ProviderOptions, ProviderRegistryProvider, ReasoningUIPart, RepairTextFunction, RetryError, SourceUIPart, SpeechModel, SpeechModelResponseMetadata, SpeechWarning, StepResult, StepStartUIPart, StreamObjectOnFinishCallback, StreamObjectResult, StreamTextOnChunkCallback, StreamTextOnErrorCallback, StreamTextOnFinishCallback, StreamTextOnStepFinishCallback, StreamTextResult, StreamTextTransform, SystemModelMessage, TelemetrySettings, TextPart, TextStreamPart, TextUIPart, Tool, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolCallUnion, ToolChoice, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolInvocation, ToolInvocationUIPart, ToolModelMessage, ToolResultPart, ToolResultUnion, ToolSet, TranscriptionModel, TranscriptionModelResponseMetadata, TranscriptionWarning, UIMessage, UIMessagePart, UIMessageStreamOptions, UIMessageStreamPart, UIMessageStreamWriter, UseChatOptions, UseCompletionOptions, UserContent, UserModelMessage, appendClientMessage, assistantModelMessageSchema, callChatApi, callCompletionApi, convertFileListToFileUIParts, convertToCoreMessages, convertToModelMessages, coreAssistantMessageSchema, coreMessageSchema, coreSystemMessageSchema, coreToolMessageSchema, coreUserMessageSchema, cosineSimilarity, createProviderRegistry, createTextStreamResponse, createUIMessageStream, createUIMessageStreamResponse, customProvider, defaultSettingsMiddleware, embed, embedMany, createMCPClient as experimental_createMCPClient, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, generateSpeech as experimental_generateSpeech, transcribe as experimental_transcribe, extractMaxToolInvocationStep, extractReasoningMiddleware, generateObject, generateText, getTextFromDataUrl, getToolInvocations, isAssistantMessageWithCompletedToolCalls, isDeepEqualData, modelMessageSchema, parsePartialJson, pipeTextStreamToResponse, pipeUIMessageStreamToResponse,
+export { AssistantContent, AssistantModelMessage, CallSettings, CallWarning, ChatRequestOptions, ChatStatus, ChatStore, ChatStoreEvent, ChatTransport, ChunkDetector, CompletionRequestOptions, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreToolMessage, CoreUserMessage, CreateUIMessage, DataContent, DeepPartial, DefaultChatTransport, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedFile as Experimental_GeneratedImage, SpeechResult as Experimental_SpeechResult, TranscriptionResult as Experimental_TranscriptionResult, FilePart, FileUIPart, FinishReason, GenerateObjectResult, GenerateTextOnStepFinishCallback, GenerateTextResult, GeneratedAudioFile, GeneratedFile, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImageModelProviderMetadata, ImageModelResponseMetadata, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidStreamPartError, InvalidToolArgumentsError, JSONRPCError, JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, JSONValue, JsonToSseTransformStream, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, MCPClientError, MCPTransport, MessageConversionError, ModelMessage, NoImageGeneratedError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, OriginalUseChatOptions, output as Output, Prompt, Provider, ProviderMetadata, ProviderOptions, ProviderRegistryProvider, ReasoningUIPart, RepairTextFunction, RetryError, SourceUIPart, SpeechModel, SpeechModelResponseMetadata, SpeechWarning, StepResult, StepStartUIPart, StreamObjectOnFinishCallback, StreamObjectResult, StreamTextOnChunkCallback, StreamTextOnErrorCallback, StreamTextOnFinishCallback, StreamTextOnStepFinishCallback, StreamTextResult, StreamTextTransform, SystemModelMessage, TelemetrySettings, TextPart, TextStreamPart, TextUIPart, Tool, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolCallUnion, ToolChoice, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolInvocation, ToolInvocationUIPart, ToolModelMessage, ToolResultPart, ToolResultUnion, ToolSet, TranscriptionModel, TranscriptionModelResponseMetadata, TranscriptionWarning, UIMessage, UIMessagePart, UIMessageStreamOptions, UIMessageStreamPart, UIMessageStreamWriter, UseChatOptions, UseCompletionOptions, UserContent, UserModelMessage, appendClientMessage, assistantModelMessageSchema, callChatApi, callCompletionApi, convertFileListToFileUIParts, convertToCoreMessages, convertToModelMessages, coreAssistantMessageSchema, coreMessageSchema, coreSystemMessageSchema, coreToolMessageSchema, coreUserMessageSchema, cosineSimilarity, createProviderRegistry, createTextStreamResponse, createUIMessageStream, createUIMessageStreamResponse, customProvider, defaultChatStore, defaultSettingsMiddleware, embed, embedMany, createMCPClient as experimental_createMCPClient, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, generateSpeech as experimental_generateSpeech, transcribe as experimental_transcribe, extractMaxToolInvocationStep, extractReasoningMiddleware, generateObject, generateText, getTextFromDataUrl, getToolInvocations, isAssistantMessageWithCompletedToolCalls, isDeepEqualData, modelMessageSchema, parsePartialJson, pipeTextStreamToResponse, pipeUIMessageStreamToResponse, shouldResubmitMessages, simulateReadableStream, simulateStreamingMiddleware, smoothStream, streamObject, streamText, systemModelMessageSchema, tool, toolModelMessageSchema, updateToolCallResult, userModelMessageSchema, wrapLanguageModel };