@ai-sdk/react 2.0.0-canary.20 → 2.0.0-canary.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.d.mts +22 -27
- package/dist/index.d.ts +22 -27
- package/dist/index.js +9 -21
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +9 -21
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,15 @@
 # @ai-sdk/react
 
+## 2.0.0-canary.21
+
+### Patch Changes
+
+- Updated dependencies [e7dc6c7]
+- Updated dependencies [a34eb39]
+- Updated dependencies [b33ed7a]
+- Updated dependencies [765f1cd]
+  - ai@5.0.0-canary.22
+
 ## 2.0.0-canary.20
 
 ### Patch Changes
package/dist/index.d.mts
CHANGED
@@ -1,20 +1,34 @@
-import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions,
+import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
 export { CreateUIMessage, UIMessage, UseChatOptions, UseCompletionOptions } from 'ai';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import z from 'zod';
 
 type UseChatHelpers<MESSAGE_METADATA = unknown> = {
+    /**
+     * The id of the chat.
+     */
+    readonly id: string;
+    /**
+     * Hook status:
+     *
+     * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
+     * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
+     * - `ready`: The full response has been received and processed; a new user message can be submitted.
+     * - `error`: An error occurred during the API request, preventing successful completion.
+     */
+    readonly status: 'submitted' | 'streaming' | 'ready' | 'error';
     /** Current messages in the chat */
-    messages: UIMessage<MESSAGE_METADATA>[];
+    readonly messages: UIMessage<MESSAGE_METADATA>[];
     /** The error object of the API request */
-    error: undefined | Error;
+    readonly error: undefined | Error;
     /**
      * Append a user message to the chat list. This triggers the API call to fetch
      * the assistant's response.
+     *
      * @param message The message to append
      * @param options Additional options to pass to the API call
      */
-    append: (message:
+    append: (message: CreateUIMessage<MESSAGE_METADATA>, options?: ChatRequestOptions) => Promise<void>;
     /**
      * Reload the last AI chat response for the given chat history. If the last
      * message isn't from the assistant, it will request the API to generate a
@@ -47,43 +61,24 @@ type UseChatHelpers<MESSAGE_METADATA = unknown> = {
     }, chatRequestOptions?: ChatRequestOptions & {
         files?: FileList | FileUIPart[];
     }) => void;
-    metadata?: Object;
-    /**
-     * Whether the API request is in progress
-     *
-     * @deprecated use `status` instead
-     */
-    isLoading: boolean;
-    /**
-     * Hook status:
-     *
-     * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
-     * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
-     * - `ready`: The full response has been received and processed; a new user message can be submitted.
-     * - `error`: An error occurred during the API request, preventing successful completion.
-     */
-    status: 'submitted' | 'streaming' | 'ready' | 'error';
     addToolResult: ({ toolCallId, result, }: {
         toolCallId: string;
         result: any;
     }) => void;
-    /** The id of the chat */
-    id: string;
 };
-declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol,
+declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol, onFinish, onError, credentials, headers, body, generateId, fetch, experimental_throttle: throttleWaitMs, messageMetadataSchema, }?: UseChatOptions<MESSAGE_METADATA> & {
     /**
      * Experimental (React only). When a function is provided, it will be used
      * to prepare the request body for the chat API. This can be useful for
      * customizing the request body based on the messages and data in the chat.
     *
+     * @param id The id of the chat.
     * @param messages The current messages in the chat.
-     * @param requestData The data object passed in the chat request.
     * @param requestBody The request body object passed in the chat request.
     */
    experimental_prepareRequestBody?: (options: {
        id: string;
-        messages: UIMessage[];
-        requestData?: JSONValue;
+        messages: UIMessage<MESSAGE_METADATA>[];
        requestBody?: object;
    }) => unknown;
    /**
@@ -137,7 +132,7 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: boolean;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch,
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
     /**
      * Custom throttle wait in ms for the completion and data updates.
      * Default is undefined, which disables throttling.
package/dist/index.d.ts
CHANGED
@@ -1,20 +1,34 @@
-import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions,
+import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
 export { CreateUIMessage, UIMessage, UseChatOptions, UseCompletionOptions } from 'ai';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import z from 'zod';
 
 type UseChatHelpers<MESSAGE_METADATA = unknown> = {
+    /**
+     * The id of the chat.
+     */
+    readonly id: string;
+    /**
+     * Hook status:
+     *
+     * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
+     * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
+     * - `ready`: The full response has been received and processed; a new user message can be submitted.
+     * - `error`: An error occurred during the API request, preventing successful completion.
+     */
+    readonly status: 'submitted' | 'streaming' | 'ready' | 'error';
     /** Current messages in the chat */
-    messages: UIMessage<MESSAGE_METADATA>[];
+    readonly messages: UIMessage<MESSAGE_METADATA>[];
     /** The error object of the API request */
-    error: undefined | Error;
+    readonly error: undefined | Error;
     /**
      * Append a user message to the chat list. This triggers the API call to fetch
      * the assistant's response.
+     *
      * @param message The message to append
      * @param options Additional options to pass to the API call
      */
-    append: (message:
+    append: (message: CreateUIMessage<MESSAGE_METADATA>, options?: ChatRequestOptions) => Promise<void>;
     /**
      * Reload the last AI chat response for the given chat history. If the last
      * message isn't from the assistant, it will request the API to generate a
@@ -47,43 +61,24 @@ type UseChatHelpers<MESSAGE_METADATA = unknown> = {
     }, chatRequestOptions?: ChatRequestOptions & {
         files?: FileList | FileUIPart[];
     }) => void;
-    metadata?: Object;
-    /**
-     * Whether the API request is in progress
-     *
-     * @deprecated use `status` instead
-     */
-    isLoading: boolean;
-    /**
-     * Hook status:
-     *
-     * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
-     * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
-     * - `ready`: The full response has been received and processed; a new user message can be submitted.
-     * - `error`: An error occurred during the API request, preventing successful completion.
-     */
-    status: 'submitted' | 'streaming' | 'ready' | 'error';
     addToolResult: ({ toolCallId, result, }: {
         toolCallId: string;
         result: any;
     }) => void;
-    /** The id of the chat */
-    id: string;
 };
-declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol,
+declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol, onFinish, onError, credentials, headers, body, generateId, fetch, experimental_throttle: throttleWaitMs, messageMetadataSchema, }?: UseChatOptions<MESSAGE_METADATA> & {
     /**
      * Experimental (React only). When a function is provided, it will be used
      * to prepare the request body for the chat API. This can be useful for
      * customizing the request body based on the messages and data in the chat.
     *
+     * @param id The id of the chat.
     * @param messages The current messages in the chat.
-     * @param requestData The data object passed in the chat request.
     * @param requestBody The request body object passed in the chat request.
     */
    experimental_prepareRequestBody?: (options: {
        id: string;
-        messages: UIMessage[];
-        requestData?: JSONValue;
+        messages: UIMessage<MESSAGE_METADATA>[];
        requestBody?: object;
    }) => unknown;
    /**
@@ -137,7 +132,7 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: boolean;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch,
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
     /**
      * Custom throttle wait in ms for the completion and data updates.
      * Default is undefined, which disables throttling.
package/dist/index.js
CHANGED
@@ -69,8 +69,7 @@ function useChat({
   onToolCall,
   experimental_prepareRequestBody,
   maxSteps = 1,
-  streamProtocol = "
-  onResponse,
+  streamProtocol = "ui-message",
   onFinish,
   onError,
   credentials,
@@ -133,12 +132,10 @@ function useChat({
       body: (_a = experimental_prepareRequestBody == null ? void 0 : experimental_prepareRequestBody({
         id: chatId,
         messages: chatMessages,
-        requestData: chatRequest.data,
         requestBody: chatRequest.body
       })) != null ? _a : {
         id: chatId,
         messages: chatMessages,
-        data: chatRequest.data,
         ...extraMetadataRef.current.body,
         ...chatRequest.body
       },
@@ -149,7 +146,6 @@ function useChat({
         ...chatRequest.headers
       },
       abortController: () => abortControllerRef.current,
-      onResponse,
       onUpdate({ message }) {
         mutateStatus("streaming");
         const replaceLastMessage = message.id === chatMessages[chatMessages.length - 1].id;
@@ -198,7 +194,6 @@ function useChat({
       mutateStatus,
       api,
       extraMetadataRef,
-      onResponse,
       onFinish,
       onError,
       setError,
@@ -216,22 +211,21 @@ function useChat({
     ]
   );
   const append = (0, import_react2.useCallback)(
-    (message, {
+    async (message, { headers: headers2, body: body2 } = {}) => {
       var _a;
-
+      await triggerRequest({
         messages: messagesRef.current.concat({
           ...message,
           id: (_a = message.id) != null ? _a : generateId()
         }),
         headers: headers2,
-        body: body2
-        data
+        body: body2
       });
     },
     [triggerRequest, generateId]
   );
   const reload = (0, import_react2.useCallback)(
-    async ({
+    async ({ headers: headers2, body: body2 } = {}) => {
       const messages2 = messagesRef.current;
       if (messages2.length === 0) {
         return null;
@@ -240,8 +234,7 @@ function useChat({
       return triggerRequest({
         messages: lastMessage.role === "assistant" ? messages2.slice(0, -1) : messages2,
         headers: headers2,
-        body: body2
-        data
+        body: body2
       });
     },
     [triggerRequest]
@@ -271,7 +264,8 @@ function useChat({
     async (event, options = {}, metadata) => {
       var _a;
       (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
-
+      const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await (0, import_ai2.convertFileListToFileUIParts)(options == null ? void 0 : options.files);
+      if (!input && fileParts.length === 0)
         return;
       if (metadata) {
         extraMetadataRef.current = {
@@ -279,7 +273,6 @@ function useChat({
           ...metadata
         };
       }
-      const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await (0, import_ai2.convertFileListToFileUIParts)(options == null ? void 0 : options.files);
       triggerRequest({
         messages: messagesRef.current.concat({
           id: generateId(),
@@ -288,8 +281,7 @@ function useChat({
           parts: [...fileParts, { type: "text", text: input }]
         }),
         headers: options.headers,
-        body: options.body
-        data: options.data
+        body: options.body
       });
       setInput("");
     },
@@ -341,7 +333,6 @@ function useChat({
     setInput,
     handleInputChange,
     handleSubmit,
-    isLoading: status === "submitted" || status === "streaming",
     status,
     addToolResult
   };
@@ -361,7 +352,6 @@ function useCompletion({
   body,
   streamProtocol = "data",
   fetch: fetch2,
-  onResponse,
   onFinish,
   onError,
   experimental_throttle: throttleWaitMs
@@ -410,7 +400,6 @@ function useCompletion({
       setLoading: mutateLoading,
       setError,
       setAbortController,
-      onResponse,
       onFinish,
       onError
     }),
@@ -420,7 +409,6 @@ function useCompletion({
       api,
       extraMetadataRef,
       setAbortController,
-      onResponse,
       onFinish,
       onError,
       setError,
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/use-chat.ts","../src/throttle.ts","../src/util/use-stable-value.ts","../src/use-completion.ts","../src/use-object.ts"],"sourcesContent":["export * from './use-chat';\nexport * from './use-completion';\nexport * from './use-object';\n","import type {\n ChatRequestOptions,\n CreateUIMessage,\n FileUIPart,\n JSONValue,\n UIMessage,\n UseChatOptions,\n} from 'ai';\nimport {\n callChatApi,\n convertFileListToFileUIParts,\n extractMaxToolInvocationStep,\n generateId as generateIdFunc,\n getToolInvocations,\n isAssistantMessageWithCompletedToolCalls,\n shouldResubmitMessages,\n updateToolCallResult,\n} from 'ai';\nimport { useCallback, useEffect, useMemo, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\nimport { useStableValue } from './util/use-stable-value';\n\nexport type { CreateUIMessage, UIMessage, UseChatOptions };\n\nexport type UseChatHelpers<MESSAGE_METADATA = unknown> = {\n /** Current messages in the chat */\n messages: UIMessage<MESSAGE_METADATA>[];\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Append a user message to the chat list. This triggers the API call to fetch\n * the assistant's response.\n * @param message The message to append\n * @param options Additional options to pass to the API call\n */\n append: (\n message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>,\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n /**\n * Reload the last AI chat response for the given chat history. If the last\n * message isn't from the assistant, it will request the API to generate a\n * new response.\n */\n reload: (\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n /**\n * Abort the current request immediately, keep the generated tokens if any.\n */\n stop: () => void;\n\n /**\n * Resume an ongoing chat generation stream. This does not resume an aborted generation.\n */\n experimental_resume: () => void;\n\n /**\n * Update the `messages` state locally. 
This is useful when you want to\n * edit the messages on the client, and then trigger the `reload` method\n * manually to regenerate the AI response.\n */\n setMessages: (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /** An input/textarea-ready onChange handler to control the value of the input */\n handleInputChange: (\n e:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n /** Form submission handler to automatically reset input and append a user message */\n handleSubmit: (\n event?: { preventDefault?: () => void },\n chatRequestOptions?: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n },\n ) => void;\n metadata?: Object;\n\n /**\n * Whether the API request is in progress\n *\n * @deprecated use `status` instead\n */\n isLoading: boolean;\n\n /**\n * Hook status:\n *\n * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.\n * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.\n * - `ready`: The full response has been received and processed; a new user message can be submitted.\n * - `error`: An error occurred during the API request, preventing successful completion.\n */\n status: 'submitted' | 'streaming' | 'ready' | 'error';\n\n addToolResult: ({\n toolCallId,\n result,\n }: {\n toolCallId: string;\n result: any;\n }) => void;\n\n /** The id of the chat */\n id: string;\n};\n\nexport function useChat<MESSAGE_METADATA>({\n api = '/api/chat',\n id,\n initialMessages,\n initialInput = '',\n onToolCall,\n experimental_prepareRequestBody,\n maxSteps = 1,\n streamProtocol = 'data',\n onResponse,\n onFinish,\n onError,\n credentials,\n headers,\n body,\n generateId = generateIdFunc,\n fetch,\n experimental_throttle: throttleWaitMs,\n messageMetadataSchema,\n}: UseChatOptions<MESSAGE_METADATA> & {\n /**\n * Experimental (React only). When a function is provided, it will be used\n * to prepare the request body for the chat API. This can be useful for\n * customizing the request body based on the messages and data in the chat.\n *\n * @param messages The current messages in the chat.\n * @param requestData The data object passed in the chat request.\n * @param requestBody The request body object passed in the chat request.\n */\n experimental_prepareRequestBody?: (options: {\n id: string;\n messages: UIMessage[];\n requestData?: JSONValue;\n requestBody?: object;\n }) => unknown;\n\n /**\nCustom throttle wait in ms for the chat messages and data updates.\nDefault is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseChatHelpers<MESSAGE_METADATA> {\n // Generate ID once, store in state for stability across re-renders\n const [hookId] = useState(generateId);\n\n // Use the caller-supplied ID if available; otherwise, fall back to our stable ID\n const chatId = id ?? hookId;\n const chatKey = typeof api === 'string' ? [api, chatId] : chatId;\n\n // Store array of the processed initial messages to avoid re-renders:\n const stableInitialMessages = useStableValue(initialMessages ?? 
[]);\n const processedInitialMessages = useMemo(\n () => stableInitialMessages,\n [stableInitialMessages],\n );\n\n // Store the chat state in SWR, using the chatId as the key to share states.\n const { data: messages, mutate } = useSWR<UIMessage<MESSAGE_METADATA>[]>(\n [chatKey, 'messages'],\n null,\n { fallbackData: processedInitialMessages },\n );\n\n // Keep the latest messages in a ref.\n const messagesRef = useRef<UIMessage<MESSAGE_METADATA>[]>(messages || []);\n useEffect(() => {\n messagesRef.current = messages || [];\n }, [messages]);\n\n const { data: status = 'ready', mutate: mutateStatus } = useSWR<\n 'submitted' | 'streaming' | 'ready' | 'error'\n >([chatKey, 'status'], null);\n\n const { data: error = undefined, mutate: setError } = useSWR<\n undefined | Error\n >([chatKey, 'error'], null);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (\n chatRequest: {\n headers?: Record<string, string> | Headers;\n body?: object;\n messages: UIMessage<MESSAGE_METADATA>[];\n data?: JSONValue;\n },\n requestType: 'generate' | 'resume' = 'generate',\n ) => {\n mutateStatus('submitted');\n setError(undefined);\n\n const chatMessages = chatRequest.messages;\n\n const messageCount = chatMessages.length;\n const maxStep = extractMaxToolInvocationStep(\n getToolInvocations(chatMessages[chatMessages.length - 1]),\n );\n\n try {\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const throttledMutate = throttle(mutate, throttleWaitMs);\n\n // Do an optimistic update to show the updated messages immediately:\n throttledMutate(chatMessages, false);\n\n await callChatApi({\n api,\n body: experimental_prepareRequestBody?.({\n id: chatId,\n messages: chatMessages,\n requestData: chatRequest.data,\n requestBody: chatRequest.body,\n }) ?? {\n id: chatId,\n messages: chatMessages,\n data: chatRequest.data,\n ...extraMetadataRef.current.body,\n ...chatRequest.body,\n },\n streamProtocol,\n credentials: extraMetadataRef.current.credentials,\n headers: {\n ...extraMetadataRef.current.headers,\n ...chatRequest.headers,\n },\n abortController: () => abortControllerRef.current,\n onResponse,\n onUpdate({ message }) {\n mutateStatus('streaming');\n\n const replaceLastMessage =\n message.id === chatMessages[chatMessages.length - 1].id;\n\n throttledMutate(\n [\n ...(replaceLastMessage\n ? 
chatMessages.slice(0, chatMessages.length - 1)\n : chatMessages),\n message,\n ],\n false,\n );\n },\n onToolCall,\n onFinish,\n generateId,\n fetch,\n lastMessage: chatMessages[chatMessages.length - 1],\n requestType,\n messageMetadataSchema,\n });\n\n abortControllerRef.current = null;\n\n mutateStatus('ready');\n } catch (err) {\n // Ignore abort errors as they are expected.\n if ((err as any).name === 'AbortError') {\n abortControllerRef.current = null;\n mutateStatus('ready');\n return null;\n }\n\n if (onError && err instanceof Error) {\n onError(err);\n }\n\n setError(err as Error);\n mutateStatus('error');\n }\n\n // auto-submit when all tool calls in the last assistant message have results\n // and assistant has not answered yet\n const messages = messagesRef.current;\n if (\n shouldResubmitMessages({\n originalMaxToolInvocationStep: maxStep,\n originalMessageCount: messageCount,\n maxSteps,\n messages,\n })\n ) {\n await triggerRequest({ messages });\n }\n },\n [\n mutate,\n mutateStatus,\n api,\n extraMetadataRef,\n onResponse,\n onFinish,\n onError,\n setError,\n streamProtocol,\n experimental_prepareRequestBody,\n onToolCall,\n maxSteps,\n messagesRef,\n abortControllerRef,\n generateId,\n fetch,\n throttleWaitMs,\n chatId,\n messageMetadataSchema,\n ],\n );\n\n const append = useCallback(\n (\n message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>,\n { data, headers, body }: ChatRequestOptions = {},\n ) =>\n triggerRequest({\n messages: messagesRef.current.concat({\n ...message,\n id: message.id ?? generateId(),\n }),\n headers,\n body,\n data,\n }),\n [triggerRequest, generateId],\n );\n\n const reload = useCallback(\n async ({ data, headers, body }: ChatRequestOptions = {}) => {\n const messages = messagesRef.current;\n\n if (messages.length === 0) {\n return null;\n }\n\n // Remove last assistant message and retry last user message.\n const lastMessage = messages[messages.length - 1];\n return triggerRequest({\n messages:\n lastMessage.role === 'assistant' ? messages.slice(0, -1) : messages,\n headers,\n body,\n data,\n });\n },\n [triggerRequest],\n );\n\n const stop = useCallback(() => {\n if (abortControllerRef.current) {\n abortControllerRef.current.abort();\n abortControllerRef.current = null;\n }\n }, []);\n\n const experimental_resume = useCallback(async () => {\n const messages = messagesRef.current;\n\n triggerRequest({ messages }, 'resume');\n }, [triggerRequest]);\n\n const setMessages = useCallback(\n (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => {\n if (typeof messages === 'function') {\n messages = messages(messagesRef.current);\n }\n\n mutate(messages, false);\n messagesRef.current = messages;\n },\n [mutate],\n );\n\n // Input state and handlers.\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n async (\n event?: { preventDefault?: () => void },\n options: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n } = {},\n metadata?: Object,\n ) => {\n event?.preventDefault?.();\n\n if (!input && !options.allowEmptySubmit) return;\n\n if (metadata) {\n extraMetadataRef.current = {\n ...extraMetadataRef.current,\n ...metadata,\n };\n }\n\n const fileParts = Array.isArray(options?.files)\n ? 
options.files\n : await convertFileListToFileUIParts(options?.files);\n\n triggerRequest({\n messages: messagesRef.current.concat({\n id: generateId(),\n role: 'user',\n metadata: undefined,\n parts: [...fileParts, { type: 'text', text: input }],\n }),\n headers: options.headers,\n body: options.body,\n data: options.data,\n });\n\n setInput('');\n },\n [input, generateId, triggerRequest],\n );\n\n const handleInputChange = (e: any) => {\n setInput(e.target.value);\n };\n\n const addToolResult = useCallback(\n ({ toolCallId, result }: { toolCallId: string; result: unknown }) => {\n const currentMessages = messagesRef.current;\n\n updateToolCallResult({\n messages: currentMessages,\n toolCallId,\n toolResult: result,\n });\n\n // array mutation is required to trigger a re-render\n mutate(\n [\n ...currentMessages.slice(0, currentMessages.length - 1),\n {\n ...currentMessages[currentMessages.length - 1],\n // @ts-ignore\n // update the revisionId to trigger a re-render\n revisionId: generateId(),\n },\n ],\n false,\n );\n\n // when the request is ongoing, the auto-submit will be triggered after the request is finished\n if (status === 'submitted' || status === 'streaming') {\n return;\n }\n\n // auto-submit when all tool calls in the last assistant message have results:\n const lastMessage = currentMessages[currentMessages.length - 1];\n if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {\n triggerRequest({ messages: currentMessages });\n }\n },\n [mutate, status, triggerRequest, generateId],\n );\n\n return {\n messages: messages ?? [],\n id: chatId,\n setMessages,\n error,\n append,\n reload,\n stop,\n experimental_resume,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading: status === 'submitted' || status === 'streaming',\n status,\n addToolResult,\n };\n}\n","import throttleFunction from 'throttleit';\n\nexport function throttle<T extends (...args: any[]) => any>(\n fn: T,\n waitMs: number | undefined,\n): T {\n return waitMs != null ? 
throttleFunction(fn, waitMs) : fn;\n}\n","import { isDeepEqualData } from 'ai';\nimport { useEffect, useState } from 'react';\n\n/**\n * Returns a stable value that only updates the stored value (and triggers a re-render)\n * when the value's contents differ by deep-compare.\n */\nexport function useStableValue<T>(latestValue: T): T {\n const [value, setValue] = useState<T>(latestValue);\n\n useEffect(() => {\n if (!isDeepEqualData(latestValue, value)) {\n setValue(latestValue);\n }\n }, [latestValue, value]);\n\n return value;\n}\n","import {\n CompletionRequestOptions,\n UseCompletionOptions,\n callCompletionApi,\n} from 'ai';\nimport { useCallback, useEffect, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\n\nexport type { UseCompletionOptions };\n\nexport type UseCompletionHelpers = {\n /** The current completion result */\n completion: string;\n /**\n * Send a new prompt to the API endpoint and update the completion state.\n */\n complete: (\n prompt: string,\n options?: CompletionRequestOptions,\n ) => Promise<string | null | undefined>;\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Abort the current API request but keep the generated tokens.\n */\n stop: () => void;\n /**\n * Update the `completion` state locally.\n */\n setCompletion: (completion: string) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /**\n * An input/textarea-ready onChange handler to control the value of the input\n * @example\n * ```jsx\n * <input onChange={handleInputChange} value={input} />\n * ```\n */\n handleInputChange: (\n event:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /**\n * Form submission handler to automatically reset input and append a user message\n * @example\n * ```jsx\n * <form onSubmit={handleSubmit}>\n * <input onChange={handleInputChange} value={input} />\n * </form>\n * ```\n */\n handleSubmit: (event?: { preventDefault?: () => void }) => void;\n\n /** Whether the API request is in progress */\n isLoading: boolean;\n};\n\nexport function useCompletion({\n api = '/api/completion',\n id,\n initialCompletion = '',\n initialInput = '',\n credentials,\n headers,\n body,\n streamProtocol = 'data',\n fetch,\n onResponse,\n onFinish,\n onError,\n experimental_throttle: throttleWaitMs,\n}: UseCompletionOptions & {\n /**\n * Custom throttle wait in ms for the completion and data updates.\n * Default is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseCompletionHelpers {\n // Generate an unique id for the completion if not provided.\n const hookId = useId();\n const completionId = id || hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<string>([api, completionId], null, {\n fallbackData: initialCompletion,\n });\n\n const { data: isLoading = false, mutate: mutateLoading } = useSWR<boolean>(\n [completionId, 'loading'],\n null,\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const completion = data!;\n\n // Abort controller to cancel the current API call.\n const [abortController, setAbortController] =\n useState<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n 
extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (prompt: string, options?: CompletionRequestOptions) =>\n callCompletionApi({\n api,\n prompt,\n credentials: extraMetadataRef.current.credentials,\n headers: { ...extraMetadataRef.current.headers, ...options?.headers },\n body: {\n ...extraMetadataRef.current.body,\n ...options?.body,\n },\n streamProtocol,\n fetch,\n // throttle streamed ui updates:\n setCompletion: throttle(\n (completion: string) => mutate(completion, false),\n throttleWaitMs,\n ),\n setLoading: mutateLoading,\n setError,\n setAbortController,\n onResponse,\n onFinish,\n onError,\n }),\n [\n mutate,\n mutateLoading,\n api,\n extraMetadataRef,\n setAbortController,\n onResponse,\n onFinish,\n onError,\n setError,\n streamProtocol,\n fetch,\n throttleWaitMs,\n ],\n );\n\n const stop = useCallback(() => {\n if (abortController) {\n abortController.abort();\n setAbortController(null);\n }\n }, [abortController]);\n\n const setCompletion = useCallback(\n (completion: string) => {\n mutate(completion, false);\n },\n [mutate],\n );\n\n const complete = useCallback<UseCompletionHelpers['complete']>(\n async (prompt, options) => {\n return triggerRequest(prompt, options);\n },\n [triggerRequest],\n );\n\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n (event?: { preventDefault?: () => void }) => {\n event?.preventDefault?.();\n return input ? complete(input) : undefined;\n },\n [input, complete],\n );\n\n const handleInputChange = useCallback(\n (e: any) => {\n setInput(e.target.value);\n },\n [setInput],\n );\n\n return {\n completion,\n complete,\n error,\n setCompletion,\n stop,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading,\n };\n}\n","import {\n FetchFunction,\n isAbortError,\n safeValidateTypes,\n} from '@ai-sdk/provider-utils';\nimport {\n asSchema,\n DeepPartial,\n isDeepEqualData,\n parsePartialJson,\n Schema,\n} from 'ai';\nimport { useCallback, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport z from 'zod';\n\n// use function to allow for mocking in tests:\nconst getOriginalFetch = () => fetch;\n\nexport type Experimental_UseObjectOptions<RESULT> = {\n /**\n * The API endpoint. It should stream JSON that matches the schema as chunked text.\n */\n api: string;\n\n /**\n * A Zod schema that defines the shape of the complete object.\n */\n schema: z.Schema<RESULT, z.ZodTypeDef, any> | Schema<RESULT>;\n\n /**\n * An unique identifier. If not provided, a random one will be\n * generated. When provided, the `useObject` hook with the same `id` will\n * have shared states across components.\n */\n id?: string;\n\n /**\n * An optional value for the initial object.\n */\n initialValue?: DeepPartial<RESULT>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCallback that is called when the stream has finished.\n */\n onFinish?: (event: {\n /**\nThe generated object (typed according to the schema).\nCan be undefined if the final object does not match the schema.\n */\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. 
a TypeValidationError when the final object does not match the schema.\n */\n error: Error | undefined;\n }) => Promise<void> | void;\n\n /**\n * Callback function to be called when an error is encountered.\n */\n onError?: (error: Error) => void;\n\n /**\n * Additional HTTP headers to be included in the request.\n */\n headers?: Record<string, string> | Headers;\n\n /**\n * The credentials mode to be used for the fetch request.\n * Possible values are: 'omit', 'same-origin', 'include'.\n * Defaults to 'same-origin'.\n */\n credentials?: RequestCredentials;\n};\n\nexport type Experimental_UseObjectHelpers<RESULT, INPUT> = {\n /**\n * Calls the API with the provided input as JSON body.\n */\n submit: (input: INPUT) => void;\n\n /**\n * The current value for the generated object. Updated as the API streams JSON chunks.\n */\n object: DeepPartial<RESULT> | undefined;\n\n /**\n * The error object of the API request if any.\n */\n error: Error | undefined;\n\n /**\n * Flag that indicates whether an API request is in progress.\n */\n isLoading: boolean;\n\n /**\n * Abort the current request immediately, keep the current partial object if any.\n */\n stop: () => void;\n};\n\nfunction useObject<RESULT, INPUT = any>({\n api,\n id,\n schema, // required, in the future we will use it for validation\n initialValue,\n fetch,\n onError,\n onFinish,\n headers,\n credentials,\n}: Experimental_UseObjectOptions<RESULT>): Experimental_UseObjectHelpers<\n RESULT,\n INPUT\n> {\n // Generate an unique id if not provided.\n const hookId = useId();\n const completionId = id ?? hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<DeepPartial<RESULT>>(\n [api, completionId],\n null,\n { fallbackData: initialValue },\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const [isLoading, setIsLoading] = useState(false);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const stop = useCallback(() => {\n try {\n abortControllerRef.current?.abort();\n } catch (ignored) {\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n }, []);\n\n const submit = async (input: INPUT) => {\n try {\n mutate(undefined); // reset the data\n setIsLoading(true);\n setError(undefined);\n\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const actualFetch = fetch ?? getOriginalFetch();\n const response = await actualFetch(api, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n credentials,\n signal: abortController.signal,\n body: JSON.stringify(input),\n });\n\n if (!response.ok) {\n throw new Error(\n (await response.text()) ?? 
'Failed to fetch the response.',\n );\n }\n\n if (response.body == null) {\n throw new Error('The response body is empty.');\n }\n\n let accumulatedText = '';\n let latestObject: DeepPartial<RESULT> | undefined = undefined;\n\n await response.body.pipeThrough(new TextDecoderStream()).pipeTo(\n new WritableStream<string>({\n async write(chunk) {\n accumulatedText += chunk;\n\n const { value } = await parsePartialJson(accumulatedText);\n const currentObject = value as DeepPartial<RESULT>;\n\n if (!isDeepEqualData(latestObject, currentObject)) {\n latestObject = currentObject;\n\n mutate(currentObject);\n }\n },\n\n async close() {\n setIsLoading(false);\n abortControllerRef.current = null;\n\n if (onFinish != null) {\n const validationResult = await safeValidateTypes({\n value: latestObject,\n schema: asSchema(schema),\n });\n\n onFinish(\n validationResult.success\n ? { object: validationResult.value, error: undefined }\n : { object: undefined, error: validationResult.error },\n );\n }\n },\n }),\n );\n } catch (error) {\n if (isAbortError(error)) {\n return;\n }\n\n if (onError && error instanceof Error) {\n onError(error);\n }\n\n setIsLoading(false);\n setError(error instanceof Error ? error : new Error(String(error)));\n }\n };\n\n return {\n submit,\n object: data,\n error,\n isLoading,\n stop,\n };\n}\n\nexport const experimental_useObject = useObject;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACQA,IAAAA,aASO;AACP,IAAAC,gBAAkE;AAClE,iBAAmB;;;ACnBnB,wBAA6B;AAEtB,SAAS,SACd,IACA,QACG;AACH,SAAO,UAAU,WAAO,kBAAAC,SAAiB,IAAI,MAAM,IAAI;AACzD;;;ACPA,gBAAgC;AAChC,mBAAoC;AAM7B,SAAS,eAAkB,aAAmB;AACnD,QAAM,CAAC,OAAO,QAAQ,QAAI,uBAAY,WAAW;AAEjD,8BAAU,MAAM;AACd,QAAI,KAAC,2BAAgB,aAAa,KAAK,GAAG;AACxC,eAAS,WAAW;AAAA,IACtB;AAAA,EACF,GAAG,CAAC,aAAa,KAAK,CAAC;AAEvB,SAAO;AACT;;;AFqGO,SAAS,QAA0B;AAAA,EACxC,MAAM;AAAA,EACN;AAAA,EACA;AAAA,EACA,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,aAAa,WAAAC;AAAA,EACb,OAAAC;AAAA,EACA,uBAAuB;AAAA,EACvB;AACF,IAsBI,CAAC,GAAqC;AAExC,QAAM,CAAC,MAAM,QAAI,wBAAS,UAAU;AAGpC,QAAM,SAAS,kBAAM;AACrB,QAAM,UAAU,OAAO,QAAQ,WAAW,CAAC,KAAK,MAAM,IAAI;AAG1D,QAAM,wBAAwB,eAAe,4CAAmB,CAAC,CAAC;AAClE,QAAM,+BAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,CAAC,qBAAqB;AAAA,EACxB;AAGA,QAAM,EAAE,MAAM,UAAU,OAAO,QAAI,WAAAC;AAAA,IACjC,CAAC,SAAS,UAAU;AAAA,IACpB;AAAA,IACA,EAAE,cAAc,yBAAyB;AAAA,EAC3C;AAGA,QAAM,kBAAc,sBAAsC,YAAY,CAAC,CAAC;AACxE,+BAAU,MAAM;AACd,gBAAY,UAAU,YAAY,CAAC;AAAA,EACrC,GAAG,CAAC,QAAQ,CAAC;AAEb,QAAM,EAAE,MAAM,SAAS,SAAS,QAAQ,aAAa,QAAI,WAAAA,SAEvD,CAAC,SAAS,QAAQ,GAAG,IAAI;AAE3B,QAAM,EAAE,MAAM,QAAQ,QAAW,QAAQ,SAAS,QAAI,WAAAA,SAEpD,CAAC,SAAS,OAAO,GAAG,IAAI;AAG1B,QAAM,yBAAqB,sBAA+B,IAAI;AAE9D,QAAM,uBAAmB,sBAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,+BAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,qBAAiB;AAAA,IACrB,OACE,aAMA,cAAqC,eAClC;AA7NT;AA8NM,mBAAa,WAAW;AACxB,eAAS,MAAS;AAElB,YAAM,eAAe,YAAY;AAEjC,YAAM,eAAe,aAAa;AAClC,YAAM,cAAU;AAAA,YACd,+BAAmB,aAAa,aAAa,SAAS,CAAC,CAAC;AAAA,MAC1D;AAEA,UAAI;AACF,cAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BAAmB,UAAU;AAE7B,cAAM,kBAAkB,SAAS,QAAQ,cAAc;AAGvD,wBAAgB,cAAc,KAAK;AAEnC,kBAAM,wBAAY;AAAA,UAChB;AAAA,UACA,OAAM,wFAAkC;AAAA,YACtC,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,aAAa,YAAY;AAAA,YACzB,aAAa,YAAY;AAAA,UAC3B,OALM,YAKA;AAAA,YACJ,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,MAAM,YAAY;AAAA,YAClB,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA;AAAA,UACA,aAAa,iBAAiB,QAAQ;AAAA,UACtC,SAAS;AAAA,YACP,
GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA,iBAAiB,MAAM,mBAAmB;AAAA,UAC1C;AAAA,UACA,SAAS,EAAE,QAAQ,GAAG;AACpB,yBAAa,WAAW;AAExB,kBAAM,qBACJ,QAAQ,OAAO,aAAa,aAAa,SAAS,CAAC,EAAE;AAEvD;AAAA,cACE;AAAA,gBACE,GAAI,qBACA,aAAa,MAAM,GAAG,aAAa,SAAS,CAAC,IAC7C;AAAA,gBACJ;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,OAAAD;AAAA,UACA,aAAa,aAAa,aAAa,SAAS,CAAC;AAAA,UACjD;AAAA,UACA;AAAA,QACF,CAAC;AAED,2BAAmB,UAAU;AAE7B,qBAAa,OAAO;AAAA,MACtB,SAAS,KAAK;AAEZ,YAAK,IAAY,SAAS,cAAc;AACtC,6BAAmB,UAAU;AAC7B,uBAAa,OAAO;AACpB,iBAAO;AAAA,QACT;AAEA,YAAI,WAAW,eAAe,OAAO;AACnC,kBAAQ,GAAG;AAAA,QACb;AAEA,iBAAS,GAAY;AACrB,qBAAa,OAAO;AAAA,MACtB;AAIA,YAAME,YAAW,YAAY;AAC7B,cACE,mCAAuB;AAAA,QACrB,+BAA+B;AAAA,QAC/B,sBAAsB;AAAA,QACtB;AAAA,QACA,UAAAA;AAAA,MACF,CAAC,GACD;AACA,cAAM,eAAe,EAAE,UAAAA,UAAS,CAAC;AAAA,MACnC;AAAA,IACF;AAAA,IACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,aAAS;AAAA,IACb,CACE,SACA,EAAE,MAAM,SAAAG,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAC/C;AA5VN;AA6VM,4BAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,GAAG;AAAA,UACH,KAAI,aAAQ,OAAR,YAAc,WAAW;AAAA,QAC/B,CAAC;AAAA,QACD,SAAAD;AAAA,QACA,MAAAC;AAAA,QACA;AAAA,MACF,CAAC;AAAA;AAAA,IACH,CAAC,gBAAgB,UAAU;AAAA,EAC7B;AAEA,QAAM,aAAS;AAAA,IACb,OAAO,EAAE,MAAM,SAAAD,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAAM;AAC1D,YAAMF,YAAW,YAAY;AAE7B,UAAIA,UAAS,WAAW,GAAG;AACzB,eAAO;AAAA,MACT;AAGA,YAAM,cAAcA,UAASA,UAAS,SAAS,CAAC;AAChD,aAAO,eAAe;AAAA,QACpB,UACE,YAAY,SAAS,cAAcA,UAAS,MAAM,GAAG,EAAE,IAAIA;AAAA,QAC7D,SAAAC;AAAA,QACA,MAAAC;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,WAAO,2BAAY,MAAM;AAC7B,QAAI,mBAAmB,SAAS;AAC9B,yBAAmB,QAAQ,MAAM;AACjC,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,0BAAsB,2BAAY,YAAY;AAClD,UAAMF,YAAW,YAAY;AAE7B,mBAAe,EAAE,UAAAA,UAAS,GAAG,QAAQ;AAAA,EACvC,GAAG,CAAC,cAAc,CAAC;AAEnB,QAAM,kBAAc;AAAA,IAClB,CACEA,cAKG;AACH,UAAI,OAAOA,cAAa,YAAY;AAClC,QAAAA,YAAWA,UAAS,YAAY,OAAO;AAAA,MACzC;AAEA,aAAOA,WAAU,KAAK;AACtB,kBAAY,UAAUA;AAAA,IACxB;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAGA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAAS,YAAY;AAE/C,QAAM,mBAAe;AAAA,IACnB,OACE,OACA,UAEI,CAAC,GACL,aACG;AAvaT;AAwaM,2CAAO,mBAAP;AAEA,UAAI,CAAC,SAAS,CAAC,QAAQ;AAAkB;AAEzC,UAAI,UAAU;AACZ,yBAAiB,UAAU;AAAA,UACzB,GAAG,iBAAiB;AAAA,UACpB,GAAG;AAAA,QACL;AAAA,MACF;AAEA,YAAM,YAAY,MAAM,QAAQ,mCAAS,KAAK,IAC1C,QAAQ,QACR,UAAM,yCAA6B,mCAAS,KAAK;AAErD,qBAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,UACV,OAAO,CAAC,GAAG,WAAW,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,QACrD,CAAC;AAAA,QACD,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,QACd,MAAM,QAAQ;AAAA,MAChB,CAAC;AAED,eAAS,EAAE;AAAA,IACb;AAAA,IACA,CAAC,OAAO,YAAY,cAAc;AAAA,EACpC;AAEA,QAAM,oBAAoB,CAAC,MAAW;AACpC,aAAS,EAAE,OAAO,KAAK;AAAA,EACzB;AAEA,QAAM,oBAAgB;AAAA,IACpB,CAAC,EAAE,YAAY,OAAO,MAA+C;AACnE,YAAM,kBAAkB,YAAY;AAEpC,2CAAqB;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAGD;AAAA,QACE;AAAA,UACE,GAAG,gBAAgB,MAAM,GAAG,gBAAgB,SAAS,CAAC;AAAA,UACtD;AAAA,YACE,GAAG,gBAAgB,gBAAgB,SAAS,CAAC;AAAA;AAAA;AAAA,YAG7C,YAAY,WAAW;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAGA,UAAI,WAAW,eAAe,WAAW,aAAa;AACpD;AAAA,MACF;AAGA,YAAM,cAAc,gBAAgB,gBAAgB,SAAS,CAAC;AAC9D,cAAI,qDAAyC,WAAW,GAAG;AACzD,uBAAe,EAAE,UAAU,gBAAgB,CAAC;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,CAAC,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EAC7C;AAEA,SAAO;AAAA,IACL,UAAU,8BAAY,CAAC;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAC
A;AAAA,IACA,WAAW,WAAW,eAAe,WAAW;AAAA,IAChD;AAAA,IACA;AAAA,EACF;AACF;;;AGngBA,IAAAG,aAIO;AACP,IAAAC,gBAAgE;AAChE,IAAAC,cAAmB;AAyDZ,SAAS,cAAc;AAAA,EAC5B,MAAM;AAAA,EACN;AAAA,EACA,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA;AAAA,EACA,iBAAiB;AAAA,EACjB,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,uBAAuB;AACzB,IAMI,CAAC,GAAyB;AAE5B,QAAM,aAAS,qBAAM;AACrB,QAAM,eAAe,MAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,QAAI,YAAAC,SAAe,CAAC,KAAK,YAAY,GAAG,MAAM;AAAA,IACjE,cAAc;AAAA,EAChB,CAAC;AAED,QAAM,EAAE,MAAM,YAAY,OAAO,QAAQ,cAAc,QAAI,YAAAA;AAAA,IACzD,CAAC,cAAc,SAAS;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAA4B,MAAS;AAC/D,QAAM,aAAa;AAGnB,QAAM,CAAC,iBAAiB,kBAAkB,QACxC,wBAAiC,IAAI;AAEvC,QAAM,uBAAmB,sBAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,+BAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,qBAAiB;AAAA,IACrB,OAAO,QAAgB,gBACrB,8BAAkB;AAAA,MAChB;AAAA,MACA;AAAA,MACA,aAAa,iBAAiB,QAAQ;AAAA,MACtC,SAAS,EAAE,GAAG,iBAAiB,QAAQ,SAAS,GAAG,mCAAS,QAAQ;AAAA,MACpE,MAAM;AAAA,QACJ,GAAG,iBAAiB,QAAQ;AAAA,QAC5B,GAAG,mCAAS;AAAA,MACd;AAAA,MACA;AAAA,MACA,OAAAD;AAAA;AAAA,MAEA,eAAe;AAAA,QACb,CAACE,gBAAuB,OAAOA,aAAY,KAAK;AAAA,QAChD;AAAA,MACF;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,IACH;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAO,2BAAY,MAAM;AAC7B,QAAI,iBAAiB;AACnB,sBAAgB,MAAM;AACtB,yBAAmB,IAAI;AAAA,IACzB;AAAA,EACF,GAAG,CAAC,eAAe,CAAC;AAEpB,QAAM,oBAAgB;AAAA,IACpB,CAACE,gBAAuB;AACtB,aAAOA,aAAY,KAAK;AAAA,IAC1B;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAEA,QAAM,eAAW;AAAA,IACf,OAAO,QAAQ,YAAY;AACzB,aAAO,eAAe,QAAQ,OAAO;AAAA,IACvC;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAAS,YAAY;AAE/C,QAAM,mBAAe;AAAA,IACnB,CAAC,UAA4C;AAxLjD;AAyLM,2CAAO,mBAAP;AACA,aAAO,QAAQ,SAAS,KAAK,IAAI;AAAA,IACnC;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,wBAAoB;AAAA,IACxB,CAAC,MAAW;AACV,eAAS,EAAE,OAAO,KAAK;AAAA,IACzB;AAAA,IACA,CAAC,QAAQ;AAAA,EACX;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AClNA,4BAIO;AACP,IAAAC,aAMO;AACP,IAAAC,gBAAqD;AACrD,IAAAC,cAAmB;AAInB,IAAM,mBAAmB,MAAM;AA4F/B,SAAS,UAA+B;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AAEA,QAAM,aAAS,qBAAM;AACrB,QAAM,eAAe,kBAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,QAAI,YAAAC;AAAA,IACvB,CAAC,KAAK,YAAY;AAAA,IAClB;AAAA,IACA,EAAE,cAAc,aAAa;AAAA,EAC/B;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAA4B,MAAS;AAC/D,QAAM,CAAC,WAAW,YAAY,QAAI,wBAAS,KAAK;AAGhD,QAAM,yBAAqB,sBAA+B,IAAI;AAE9D,QAAM,WAAO,2BAAY,MAAM;AA5IjC;AA6II,QAAI;AACF,+BAAmB,YAAnB,mBAA4B;AAAA,IAC9B,SAAS,SAAS;AAAA,IAClB,UAAE;AACA,mBAAa,KAAK;AAClB,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,SAAS,OAAO,UAAiB;AAtJzC;AAuJI,QAAI;AACF,aAAO,MAAS;AAChB,mBAAa,IAAI;AACjB,eAAS,MAAS;AAElB,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAmB,UAAU;AAE7B,YAAM,cAAcD,UAAA,OAAAA,SAAS,iBAAiB;AAC9C,YAAM,WAAW,MAAM,YAAY,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ,gBAAgB;AAAA,QACxB,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI;AAAA,WACP,WAAM,SAAS,KAAK,MAApB,YAA0B;AAAA,QAC7B;AAAA,MACF;AAEA,UAAI,SAAS,QAAQ,MAAM;AACzB,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAEA,UAAI,kBAAkB;AACtB,UAAI,eAAgD;AAEpD,YAAM,SAAS,KAAK,YAAY,IAAI,kBAAkB,CAAC,EAAE;AAAA,QACvD,IAAI,eAAuB;AAAA,UACzB,MAAM,MAAM,OAAO;AACjB,+BAAmB;AAEnB,kBAAM,EAAE,MAAM,IAAI,UAAM,6BAAiB,eAA
e;AACxD,kBAAM,gBAAgB;AAEtB,gBAAI,KAAC,4BAAgB,cAAc,aAAa,GAAG;AACjD,6BAAe;AAEf,qBAAO,aAAa;AAAA,YACtB;AAAA,UACF;AAAA,UAEA,MAAM,QAAQ;AACZ,yBAAa,KAAK;AAClB,+BAAmB,UAAU;AAE7B,gBAAI,YAAY,MAAM;AACpB,oBAAM,mBAAmB,UAAM,yCAAkB;AAAA,gBAC/C,OAAO;AAAA,gBACP,YAAQ,qBAAS,MAAM;AAAA,cACzB,CAAC;AAED;AAAA,gBACE,iBAAiB,UACb,EAAE,QAAQ,iBAAiB,OAAO,OAAO,OAAU,IACnD,EAAE,QAAQ,QAAW,OAAO,iBAAiB,MAAM;AAAA,cACzD;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAASE,QAAO;AACd,cAAI,oCAAaA,MAAK,GAAG;AACvB;AAAA,MACF;AAEA,UAAI,WAAWA,kBAAiB,OAAO;AACrC,gBAAQA,MAAK;AAAA,MACf;AAEA,mBAAa,KAAK;AAClB,eAASA,kBAAiB,QAAQA,SAAQ,IAAI,MAAM,OAAOA,MAAK,CAAC,CAAC;AAAA,IACpE;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAM,yBAAyB;","names":["import_ai","import_react","throttleFunction","generateIdFunc","fetch","useSWR","messages","headers","body","import_ai","import_react","import_swr","fetch","useSWR","completion","import_ai","import_react","import_swr","fetch","useSWR","error"]}
+
{"version":3,"sources":["../src/index.ts","../src/use-chat.ts","../src/throttle.ts","../src/util/use-stable-value.ts","../src/use-completion.ts","../src/use-object.ts"],"sourcesContent":["export * from './use-chat';\nexport * from './use-completion';\nexport * from './use-object';\n","import type {\n ChatRequestOptions,\n CreateUIMessage,\n FileUIPart,\n UIMessage,\n UseChatOptions,\n} from 'ai';\nimport {\n callChatApi,\n convertFileListToFileUIParts,\n extractMaxToolInvocationStep,\n generateId as generateIdFunc,\n getToolInvocations,\n isAssistantMessageWithCompletedToolCalls,\n shouldResubmitMessages,\n updateToolCallResult,\n} from 'ai';\nimport { useCallback, useEffect, useMemo, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\nimport { useStableValue } from './util/use-stable-value';\n\nexport type { CreateUIMessage, UIMessage, UseChatOptions };\n\nexport type UseChatHelpers<MESSAGE_METADATA = unknown> = {\n /**\n * The id of the chat.\n */\n readonly id: string;\n\n /**\n * Hook status:\n *\n * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.\n * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.\n * - `ready`: The full response has been received and processed; a new user message can be submitted.\n * - `error`: An error occurred during the API request, preventing successful completion.\n */\n readonly status: 'submitted' | 'streaming' | 'ready' | 'error';\n\n /** Current messages in the chat */\n readonly messages: UIMessage<MESSAGE_METADATA>[];\n\n /** The error object of the API request */\n readonly error: undefined | Error;\n\n /**\n * Append a user message to the chat list. This triggers the API call to fetch\n * the assistant's response.\n *\n * @param message The message to append\n * @param options Additional options to pass to the API call\n */\n append: (\n message: CreateUIMessage<MESSAGE_METADATA>,\n options?: ChatRequestOptions,\n ) => Promise<void>;\n\n /**\n * Reload the last AI chat response for the given chat history. If the last\n * message isn't from the assistant, it will request the API to generate a\n * new response.\n */\n reload: (\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n\n /**\n * Abort the current request immediately, keep the generated tokens if any.\n */\n stop: () => void;\n\n /**\n * Resume an ongoing chat generation stream. This does not resume an aborted generation.\n */\n experimental_resume: () => void;\n\n /**\n * Update the `messages` state locally. 
This is useful when you want to\n * edit the messages on the client, and then trigger the `reload` method\n * manually to regenerate the AI response.\n */\n setMessages: (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => void;\n\n /** The current value of the input */\n input: string;\n\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n\n /** An input/textarea-ready onChange handler to control the value of the input */\n handleInputChange: (\n e:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /** Form submission handler to automatically reset input and append a user message */\n handleSubmit: (\n event?: { preventDefault?: () => void },\n chatRequestOptions?: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n },\n ) => void;\n\n addToolResult: ({\n toolCallId,\n result,\n }: {\n toolCallId: string;\n result: any;\n }) => void;\n};\n\nexport function useChat<MESSAGE_METADATA>({\n api = '/api/chat',\n id,\n initialMessages,\n initialInput = '',\n onToolCall,\n experimental_prepareRequestBody,\n maxSteps = 1,\n streamProtocol = 'ui-message',\n onFinish,\n onError,\n credentials,\n headers,\n body,\n generateId = generateIdFunc,\n fetch,\n experimental_throttle: throttleWaitMs,\n messageMetadataSchema,\n}: UseChatOptions<MESSAGE_METADATA> & {\n /**\n * Experimental (React only). When a function is provided, it will be used\n * to prepare the request body for the chat API. This can be useful for\n * customizing the request body based on the messages and data in the chat.\n *\n * @param id The id of the chat.\n * @param messages The current messages in the chat.\n * @param requestBody The request body object passed in the chat request.\n */\n experimental_prepareRequestBody?: (options: {\n id: string;\n messages: UIMessage<MESSAGE_METADATA>[];\n requestBody?: object;\n }) => unknown;\n\n /**\nCustom throttle wait in ms for the chat messages and data updates.\nDefault is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseChatHelpers<MESSAGE_METADATA> {\n // Generate ID once, store in state for stability across re-renders\n const [hookId] = useState(generateId);\n\n // Use the caller-supplied ID if available; otherwise, fall back to our stable ID\n const chatId = id ?? hookId;\n const chatKey = typeof api === 'string' ? [api, chatId] : chatId;\n\n // Store array of the processed initial messages to avoid re-renders:\n const stableInitialMessages = useStableValue(initialMessages ?? 
[]);\n const processedInitialMessages = useMemo(\n () => stableInitialMessages,\n [stableInitialMessages],\n );\n\n // Store the chat state in SWR, using the chatId as the key to share states.\n const { data: messages, mutate } = useSWR<UIMessage<MESSAGE_METADATA>[]>(\n [chatKey, 'messages'],\n null,\n { fallbackData: processedInitialMessages },\n );\n\n // Keep the latest messages in a ref.\n const messagesRef = useRef<UIMessage<MESSAGE_METADATA>[]>(messages || []);\n useEffect(() => {\n messagesRef.current = messages || [];\n }, [messages]);\n\n const { data: status = 'ready', mutate: mutateStatus } = useSWR<\n 'submitted' | 'streaming' | 'ready' | 'error'\n >([chatKey, 'status'], null);\n\n const { data: error = undefined, mutate: setError } = useSWR<\n undefined | Error\n >([chatKey, 'error'], null);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (\n chatRequest: ChatRequestOptions & {\n messages: UIMessage<MESSAGE_METADATA>[];\n },\n requestType: 'generate' | 'resume' = 'generate',\n ) => {\n mutateStatus('submitted');\n setError(undefined);\n\n const chatMessages = chatRequest.messages;\n\n const messageCount = chatMessages.length;\n const maxStep = extractMaxToolInvocationStep(\n getToolInvocations(chatMessages[chatMessages.length - 1]),\n );\n\n try {\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const throttledMutate = throttle(mutate, throttleWaitMs);\n\n // Do an optimistic update to show the updated messages immediately:\n throttledMutate(chatMessages, false);\n\n await callChatApi({\n api,\n body: experimental_prepareRequestBody?.({\n id: chatId,\n messages: chatMessages,\n requestBody: chatRequest.body,\n }) ?? {\n id: chatId,\n messages: chatMessages,\n ...extraMetadataRef.current.body,\n ...chatRequest.body,\n },\n streamProtocol,\n credentials: extraMetadataRef.current.credentials,\n headers: {\n ...extraMetadataRef.current.headers,\n ...chatRequest.headers,\n },\n abortController: () => abortControllerRef.current,\n onUpdate({ message }) {\n mutateStatus('streaming');\n\n const replaceLastMessage =\n message.id === chatMessages[chatMessages.length - 1].id;\n\n throttledMutate(\n [\n ...(replaceLastMessage\n ? 
chatMessages.slice(0, chatMessages.length - 1)\n : chatMessages),\n message,\n ],\n false,\n );\n },\n onToolCall,\n onFinish,\n generateId,\n fetch,\n lastMessage: chatMessages[chatMessages.length - 1],\n requestType,\n messageMetadataSchema,\n });\n\n abortControllerRef.current = null;\n\n mutateStatus('ready');\n } catch (err) {\n // Ignore abort errors as they are expected.\n if ((err as any).name === 'AbortError') {\n abortControllerRef.current = null;\n mutateStatus('ready');\n return null;\n }\n\n if (onError && err instanceof Error) {\n onError(err);\n }\n\n setError(err as Error);\n mutateStatus('error');\n }\n\n // auto-submit when all tool calls in the last assistant message have results\n // and assistant has not answered yet\n const messages = messagesRef.current;\n if (\n shouldResubmitMessages({\n originalMaxToolInvocationStep: maxStep,\n originalMessageCount: messageCount,\n maxSteps,\n messages,\n })\n ) {\n await triggerRequest({ messages });\n }\n },\n [\n mutate,\n mutateStatus,\n api,\n extraMetadataRef,\n onFinish,\n onError,\n setError,\n streamProtocol,\n experimental_prepareRequestBody,\n onToolCall,\n maxSteps,\n messagesRef,\n abortControllerRef,\n generateId,\n fetch,\n throttleWaitMs,\n chatId,\n messageMetadataSchema,\n ],\n );\n\n const append = useCallback(\n async (\n message: CreateUIMessage<MESSAGE_METADATA>,\n { headers, body }: ChatRequestOptions = {},\n ) => {\n await triggerRequest({\n messages: messagesRef.current.concat({\n ...message,\n id: message.id ?? generateId(),\n }),\n headers,\n body,\n });\n },\n [triggerRequest, generateId],\n );\n\n const reload = useCallback(\n async ({ headers, body }: ChatRequestOptions = {}) => {\n const messages = messagesRef.current;\n\n if (messages.length === 0) {\n return null;\n }\n\n // Remove last assistant message and retry last user message.\n const lastMessage = messages[messages.length - 1];\n return triggerRequest({\n messages:\n lastMessage.role === 'assistant' ? messages.slice(0, -1) : messages,\n headers,\n body,\n });\n },\n [triggerRequest],\n );\n\n const stop = useCallback(() => {\n if (abortControllerRef.current) {\n abortControllerRef.current.abort();\n abortControllerRef.current = null;\n }\n }, []);\n\n const experimental_resume = useCallback(async () => {\n const messages = messagesRef.current;\n\n triggerRequest({ messages }, 'resume');\n }, [triggerRequest]);\n\n const setMessages = useCallback(\n (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => {\n if (typeof messages === 'function') {\n messages = messages(messagesRef.current);\n }\n\n mutate(messages, false);\n messagesRef.current = messages;\n },\n [mutate],\n );\n\n // Input state and handlers.\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n async (\n event?: { preventDefault?: () => void },\n options: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n } = {},\n metadata?: Object,\n ) => {\n event?.preventDefault?.();\n\n const fileParts = Array.isArray(options?.files)\n ? 
options.files\n : await convertFileListToFileUIParts(options?.files);\n\n if (!input && fileParts.length === 0) return;\n\n if (metadata) {\n extraMetadataRef.current = {\n ...extraMetadataRef.current,\n ...metadata,\n };\n }\n\n triggerRequest({\n messages: messagesRef.current.concat({\n id: generateId(),\n role: 'user',\n metadata: undefined,\n parts: [...fileParts, { type: 'text', text: input }],\n }),\n headers: options.headers,\n body: options.body,\n });\n\n setInput('');\n },\n [input, generateId, triggerRequest],\n );\n\n const handleInputChange = (e: any) => {\n setInput(e.target.value);\n };\n\n const addToolResult = useCallback(\n ({ toolCallId, result }: { toolCallId: string; result: unknown }) => {\n const currentMessages = messagesRef.current;\n\n updateToolCallResult({\n messages: currentMessages,\n toolCallId,\n toolResult: result,\n });\n\n // array mutation is required to trigger a re-render\n mutate(\n [\n ...currentMessages.slice(0, currentMessages.length - 1),\n {\n ...currentMessages[currentMessages.length - 1],\n // @ts-ignore\n // update the revisionId to trigger a re-render\n revisionId: generateId(),\n },\n ],\n false,\n );\n\n // when the request is ongoing, the auto-submit will be triggered after the request is finished\n if (status === 'submitted' || status === 'streaming') {\n return;\n }\n\n // auto-submit when all tool calls in the last assistant message have results:\n const lastMessage = currentMessages[currentMessages.length - 1];\n if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {\n triggerRequest({ messages: currentMessages });\n }\n },\n [mutate, status, triggerRequest, generateId],\n );\n\n return {\n messages: messages ?? [],\n id: chatId,\n setMessages,\n error,\n append,\n reload,\n stop,\n experimental_resume,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n status,\n addToolResult,\n };\n}\n","import throttleFunction from 'throttleit';\n\nexport function throttle<T extends (...args: any[]) => any>(\n fn: T,\n waitMs: number | undefined,\n): T {\n return waitMs != null ? 
throttleFunction(fn, waitMs) : fn;\n}\n","import { isDeepEqualData } from 'ai';\nimport { useEffect, useState } from 'react';\n\n/**\n * Returns a stable value that only updates the stored value (and triggers a re-render)\n * when the value's contents differ by deep-compare.\n */\nexport function useStableValue<T>(latestValue: T): T {\n const [value, setValue] = useState<T>(latestValue);\n\n useEffect(() => {\n if (!isDeepEqualData(latestValue, value)) {\n setValue(latestValue);\n }\n }, [latestValue, value]);\n\n return value;\n}\n","import {\n CompletionRequestOptions,\n UseCompletionOptions,\n callCompletionApi,\n} from 'ai';\nimport { useCallback, useEffect, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\n\nexport type { UseCompletionOptions };\n\nexport type UseCompletionHelpers = {\n /** The current completion result */\n completion: string;\n /**\n * Send a new prompt to the API endpoint and update the completion state.\n */\n complete: (\n prompt: string,\n options?: CompletionRequestOptions,\n ) => Promise<string | null | undefined>;\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Abort the current API request but keep the generated tokens.\n */\n stop: () => void;\n /**\n * Update the `completion` state locally.\n */\n setCompletion: (completion: string) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /**\n * An input/textarea-ready onChange handler to control the value of the input\n * @example\n * ```jsx\n * <input onChange={handleInputChange} value={input} />\n * ```\n */\n handleInputChange: (\n event:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /**\n * Form submission handler to automatically reset input and append a user message\n * @example\n * ```jsx\n * <form onSubmit={handleSubmit}>\n * <input onChange={handleInputChange} value={input} />\n * </form>\n * ```\n */\n handleSubmit: (event?: { preventDefault?: () => void }) => void;\n\n /** Whether the API request is in progress */\n isLoading: boolean;\n};\n\nexport function useCompletion({\n api = '/api/completion',\n id,\n initialCompletion = '',\n initialInput = '',\n credentials,\n headers,\n body,\n streamProtocol = 'data',\n fetch,\n onFinish,\n onError,\n experimental_throttle: throttleWaitMs,\n}: UseCompletionOptions & {\n /**\n * Custom throttle wait in ms for the completion and data updates.\n * Default is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseCompletionHelpers {\n // Generate an unique id for the completion if not provided.\n const hookId = useId();\n const completionId = id || hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<string>([api, completionId], null, {\n fallbackData: initialCompletion,\n });\n\n const { data: isLoading = false, mutate: mutateLoading } = useSWR<boolean>(\n [completionId, 'loading'],\n null,\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const completion = data!;\n\n // Abort controller to cancel the current API call.\n const [abortController, setAbortController] =\n useState<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n 
credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (prompt: string, options?: CompletionRequestOptions) =>\n callCompletionApi({\n api,\n prompt,\n credentials: extraMetadataRef.current.credentials,\n headers: { ...extraMetadataRef.current.headers, ...options?.headers },\n body: {\n ...extraMetadataRef.current.body,\n ...options?.body,\n },\n streamProtocol,\n fetch,\n // throttle streamed ui updates:\n setCompletion: throttle(\n (completion: string) => mutate(completion, false),\n throttleWaitMs,\n ),\n setLoading: mutateLoading,\n setError,\n setAbortController,\n onFinish,\n onError,\n }),\n [\n mutate,\n mutateLoading,\n api,\n extraMetadataRef,\n setAbortController,\n onFinish,\n onError,\n setError,\n streamProtocol,\n fetch,\n throttleWaitMs,\n ],\n );\n\n const stop = useCallback(() => {\n if (abortController) {\n abortController.abort();\n setAbortController(null);\n }\n }, [abortController]);\n\n const setCompletion = useCallback(\n (completion: string) => {\n mutate(completion, false);\n },\n [mutate],\n );\n\n const complete = useCallback<UseCompletionHelpers['complete']>(\n async (prompt, options) => {\n return triggerRequest(prompt, options);\n },\n [triggerRequest],\n );\n\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n (event?: { preventDefault?: () => void }) => {\n event?.preventDefault?.();\n return input ? complete(input) : undefined;\n },\n [input, complete],\n );\n\n const handleInputChange = useCallback(\n (e: any) => {\n setInput(e.target.value);\n },\n [setInput],\n );\n\n return {\n completion,\n complete,\n error,\n setCompletion,\n stop,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading,\n };\n}\n","import {\n FetchFunction,\n isAbortError,\n safeValidateTypes,\n} from '@ai-sdk/provider-utils';\nimport {\n asSchema,\n DeepPartial,\n isDeepEqualData,\n parsePartialJson,\n Schema,\n} from 'ai';\nimport { useCallback, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport z from 'zod';\n\n// use function to allow for mocking in tests:\nconst getOriginalFetch = () => fetch;\n\nexport type Experimental_UseObjectOptions<RESULT> = {\n /**\n * The API endpoint. It should stream JSON that matches the schema as chunked text.\n */\n api: string;\n\n /**\n * A Zod schema that defines the shape of the complete object.\n */\n schema: z.Schema<RESULT, z.ZodTypeDef, any> | Schema<RESULT>;\n\n /**\n * An unique identifier. If not provided, a random one will be\n * generated. When provided, the `useObject` hook with the same `id` will\n * have shared states across components.\n */\n id?: string;\n\n /**\n * An optional value for the initial object.\n */\n initialValue?: DeepPartial<RESULT>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCallback that is called when the stream has finished.\n */\n onFinish?: (event: {\n /**\nThe generated object (typed according to the schema).\nCan be undefined if the final object does not match the schema.\n */\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. 
a TypeValidationError when the final object does not match the schema.\n */\n error: Error | undefined;\n }) => Promise<void> | void;\n\n /**\n * Callback function to be called when an error is encountered.\n */\n onError?: (error: Error) => void;\n\n /**\n * Additional HTTP headers to be included in the request.\n */\n headers?: Record<string, string> | Headers;\n\n /**\n * The credentials mode to be used for the fetch request.\n * Possible values are: 'omit', 'same-origin', 'include'.\n * Defaults to 'same-origin'.\n */\n credentials?: RequestCredentials;\n};\n\nexport type Experimental_UseObjectHelpers<RESULT, INPUT> = {\n /**\n * Calls the API with the provided input as JSON body.\n */\n submit: (input: INPUT) => void;\n\n /**\n * The current value for the generated object. Updated as the API streams JSON chunks.\n */\n object: DeepPartial<RESULT> | undefined;\n\n /**\n * The error object of the API request if any.\n */\n error: Error | undefined;\n\n /**\n * Flag that indicates whether an API request is in progress.\n */\n isLoading: boolean;\n\n /**\n * Abort the current request immediately, keep the current partial object if any.\n */\n stop: () => void;\n};\n\nfunction useObject<RESULT, INPUT = any>({\n api,\n id,\n schema, // required, in the future we will use it for validation\n initialValue,\n fetch,\n onError,\n onFinish,\n headers,\n credentials,\n}: Experimental_UseObjectOptions<RESULT>): Experimental_UseObjectHelpers<\n RESULT,\n INPUT\n> {\n // Generate an unique id if not provided.\n const hookId = useId();\n const completionId = id ?? hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<DeepPartial<RESULT>>(\n [api, completionId],\n null,\n { fallbackData: initialValue },\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const [isLoading, setIsLoading] = useState(false);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const stop = useCallback(() => {\n try {\n abortControllerRef.current?.abort();\n } catch (ignored) {\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n }, []);\n\n const submit = async (input: INPUT) => {\n try {\n mutate(undefined); // reset the data\n setIsLoading(true);\n setError(undefined);\n\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const actualFetch = fetch ?? getOriginalFetch();\n const response = await actualFetch(api, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n credentials,\n signal: abortController.signal,\n body: JSON.stringify(input),\n });\n\n if (!response.ok) {\n throw new Error(\n (await response.text()) ?? 
'Failed to fetch the response.',\n );\n }\n\n if (response.body == null) {\n throw new Error('The response body is empty.');\n }\n\n let accumulatedText = '';\n let latestObject: DeepPartial<RESULT> | undefined = undefined;\n\n await response.body.pipeThrough(new TextDecoderStream()).pipeTo(\n new WritableStream<string>({\n async write(chunk) {\n accumulatedText += chunk;\n\n const { value } = await parsePartialJson(accumulatedText);\n const currentObject = value as DeepPartial<RESULT>;\n\n if (!isDeepEqualData(latestObject, currentObject)) {\n latestObject = currentObject;\n\n mutate(currentObject);\n }\n },\n\n async close() {\n setIsLoading(false);\n abortControllerRef.current = null;\n\n if (onFinish != null) {\n const validationResult = await safeValidateTypes({\n value: latestObject,\n schema: asSchema(schema),\n });\n\n onFinish(\n validationResult.success\n ? { object: validationResult.value, error: undefined }\n : { object: undefined, error: validationResult.error },\n );\n }\n },\n }),\n );\n } catch (error) {\n if (isAbortError(error)) {\n return;\n }\n\n if (onError && error instanceof Error) {\n onError(error);\n }\n\n setIsLoading(false);\n setError(error instanceof Error ? error : new Error(String(error)));\n }\n };\n\n return {\n submit,\n object: data,\n error,\n isLoading,\n stop,\n };\n}\n\nexport const experimental_useObject = useObject;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACOA,IAAAA,aASO;AACP,IAAAC,gBAAkE;AAClE,iBAAmB;;;AClBnB,wBAA6B;AAEtB,SAAS,SACd,IACA,QACG;AACH,SAAO,UAAU,WAAO,kBAAAC,SAAiB,IAAI,MAAM,IAAI;AACzD;;;ACPA,gBAAgC;AAChC,mBAAoC;AAM7B,SAAS,eAAkB,aAAmB;AACnD,QAAM,CAAC,OAAO,QAAQ,QAAI,uBAAY,WAAW;AAEjD,8BAAU,MAAM;AACd,QAAI,KAAC,2BAAgB,aAAa,KAAK,GAAG;AACxC,eAAS,WAAW;AAAA,IACtB;AAAA,EACF,GAAG,CAAC,aAAa,KAAK,CAAC;AAEvB,SAAO;AACT;;;AFuGO,SAAS,QAA0B;AAAA,EACxC,MAAM;AAAA,EACN;AAAA,EACA;AAAA,EACA,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,aAAa,WAAAC;AAAA,EACb,OAAAC;AAAA,EACA,uBAAuB;AAAA,EACvB;AACF,IAqBI,CAAC,GAAqC;AAExC,QAAM,CAAC,MAAM,QAAI,wBAAS,UAAU;AAGpC,QAAM,SAAS,kBAAM;AACrB,QAAM,UAAU,OAAO,QAAQ,WAAW,CAAC,KAAK,MAAM,IAAI;AAG1D,QAAM,wBAAwB,eAAe,4CAAmB,CAAC,CAAC;AAClE,QAAM,+BAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,CAAC,qBAAqB;AAAA,EACxB;AAGA,QAAM,EAAE,MAAM,UAAU,OAAO,QAAI,WAAAC;AAAA,IACjC,CAAC,SAAS,UAAU;AAAA,IACpB;AAAA,IACA,EAAE,cAAc,yBAAyB;AAAA,EAC3C;AAGA,QAAM,kBAAc,sBAAsC,YAAY,CAAC,CAAC;AACxE,+BAAU,MAAM;AACd,gBAAY,UAAU,YAAY,CAAC;AAAA,EACrC,GAAG,CAAC,QAAQ,CAAC;AAEb,QAAM,EAAE,MAAM,SAAS,SAAS,QAAQ,aAAa,QAAI,WAAAA,SAEvD,CAAC,SAAS,QAAQ,GAAG,IAAI;AAE3B,QAAM,EAAE,MAAM,QAAQ,QAAW,QAAQ,SAAS,QAAI,WAAAA,SAEpD,CAAC,SAAS,OAAO,GAAG,IAAI;AAG1B,QAAM,yBAAqB,sBAA+B,IAAI;AAE9D,QAAM,uBAAmB,sBAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,+BAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,qBAAiB;AAAA,IACrB,OACE,aAGA,cAAqC,eAClC;AA1NT;AA2NM,mBAAa,WAAW;AACxB,eAAS,MAAS;AAElB,YAAM,eAAe,YAAY;AAEjC,YAAM,eAAe,aAAa;AAClC,YAAM,cAAU;AAAA,YACd,+BAAmB,aAAa,aAAa,SAAS,CAAC,CAAC;AAAA,MAC1D;AAEA,UAAI;AACF,cAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BAAmB,UAAU;AAE7B,cAAM,kBAAkB,SAAS,QAAQ,cAAc;AAGvD,wBAAgB,cAAc,KAAK;AAEnC,kBAAM,wBAAY;AAAA,UAChB;AAAA,UACA,OAAM,wFAAkC;AAAA,YACtC,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,aAAa,YAAY;AAAA,UAC3B,OAJM,YAIA;AAAA,YACJ,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA;AAAA,UACA,aAAa,iBAAiB,QAAQ;AAAA,UACtC,SAAS;AAAA,YACP,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAA
A,UACA,iBAAiB,MAAM,mBAAmB;AAAA,UAC1C,SAAS,EAAE,QAAQ,GAAG;AACpB,yBAAa,WAAW;AAExB,kBAAM,qBACJ,QAAQ,OAAO,aAAa,aAAa,SAAS,CAAC,EAAE;AAEvD;AAAA,cACE;AAAA,gBACE,GAAI,qBACA,aAAa,MAAM,GAAG,aAAa,SAAS,CAAC,IAC7C;AAAA,gBACJ;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,OAAAD;AAAA,UACA,aAAa,aAAa,aAAa,SAAS,CAAC;AAAA,UACjD;AAAA,UACA;AAAA,QACF,CAAC;AAED,2BAAmB,UAAU;AAE7B,qBAAa,OAAO;AAAA,MACtB,SAAS,KAAK;AAEZ,YAAK,IAAY,SAAS,cAAc;AACtC,6BAAmB,UAAU;AAC7B,uBAAa,OAAO;AACpB,iBAAO;AAAA,QACT;AAEA,YAAI,WAAW,eAAe,OAAO;AACnC,kBAAQ,GAAG;AAAA,QACb;AAEA,iBAAS,GAAY;AACrB,qBAAa,OAAO;AAAA,MACtB;AAIA,YAAME,YAAW,YAAY;AAC7B,cACE,mCAAuB;AAAA,QACrB,+BAA+B;AAAA,QAC/B,sBAAsB;AAAA,QACtB;AAAA,QACA,UAAAA;AAAA,MACF,CAAC,GACD;AACA,cAAM,eAAe,EAAE,UAAAA,UAAS,CAAC;AAAA,MACnC;AAAA,IACF;AAAA,IACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,aAAS;AAAA,IACb,OACE,SACA,EAAE,SAAAG,UAAS,MAAAC,MAAK,IAAwB,CAAC,MACtC;AArVT;AAsVM,YAAM,eAAe;AAAA,QACnB,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,GAAG;AAAA,UACH,KAAI,aAAQ,OAAR,YAAc,WAAW;AAAA,QAC/B,CAAC;AAAA,QACD,SAAAD;AAAA,QACA,MAAAC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,gBAAgB,UAAU;AAAA,EAC7B;AAEA,QAAM,aAAS;AAAA,IACb,OAAO,EAAE,SAAAD,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAAM;AACpD,YAAMF,YAAW,YAAY;AAE7B,UAAIA,UAAS,WAAW,GAAG;AACzB,eAAO;AAAA,MACT;AAGA,YAAM,cAAcA,UAASA,UAAS,SAAS,CAAC;AAChD,aAAO,eAAe;AAAA,QACpB,UACE,YAAY,SAAS,cAAcA,UAAS,MAAM,GAAG,EAAE,IAAIA;AAAA,QAC7D,SAAAC;AAAA,QACA,MAAAC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,WAAO,2BAAY,MAAM;AAC7B,QAAI,mBAAmB,SAAS;AAC9B,yBAAmB,QAAQ,MAAM;AACjC,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,0BAAsB,2BAAY,YAAY;AAClD,UAAMF,YAAW,YAAY;AAE7B,mBAAe,EAAE,UAAAA,UAAS,GAAG,QAAQ;AAAA,EACvC,GAAG,CAAC,cAAc,CAAC;AAEnB,QAAM,kBAAc;AAAA,IAClB,CACEA,cAKG;AACH,UAAI,OAAOA,cAAa,YAAY;AAClC,QAAAA,YAAWA,UAAS,YAAY,OAAO;AAAA,MACzC;AAEA,aAAOA,WAAU,KAAK;AACtB,kBAAY,UAAUA;AAAA,IACxB;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAGA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAAS,YAAY;AAE/C,QAAM,mBAAe;AAAA,IACnB,OACE,OACA,UAEI,CAAC,GACL,aACG;AA/ZT;AAgaM,2CAAO,mBAAP;AAEA,YAAM,YAAY,MAAM,QAAQ,mCAAS,KAAK,IAC1C,QAAQ,QACR,UAAM,yCAA6B,mCAAS,KAAK;AAErD,UAAI,CAAC,SAAS,UAAU,WAAW;AAAG;AAEtC,UAAI,UAAU;AACZ,yBAAiB,UAAU;AAAA,UACzB,GAAG,iBAAiB;AAAA,UACpB,GAAG;AAAA,QACL;AAAA,MACF;AAEA,qBAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,UACV,OAAO,CAAC,GAAG,WAAW,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,QACrD,CAAC;AAAA,QACD,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,MAChB,CAAC;AAED,eAAS,EAAE;AAAA,IACb;AAAA,IACA,CAAC,OAAO,YAAY,cAAc;AAAA,EACpC;AAEA,QAAM,oBAAoB,CAAC,MAAW;AACpC,aAAS,EAAE,OAAO,KAAK;AAAA,EACzB;AAEA,QAAM,oBAAgB;AAAA,IACpB,CAAC,EAAE,YAAY,OAAO,MAA+C;AACnE,YAAM,kBAAkB,YAAY;AAEpC,2CAAqB;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAGD;AAAA,QACE;AAAA,UACE,GAAG,gBAAgB,MAAM,GAAG,gBAAgB,SAAS,CAAC;AAAA,UACtD;AAAA,YACE,GAAG,gBAAgB,gBAAgB,SAAS,CAAC;AAAA;AAAA;AAAA,YAG7C,YAAY,WAAW;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAGA,UAAI,WAAW,eAAe,WAAW,aAAa;AACpD;AAAA,MACF;AAGA,YAAM,cAAc,gBAAgB,gBAAgB,SAAS,CAAC;AAC9D,cAAI,qDAAyC,WAAW,GAAG;AACzD,uBAAe,EAAE,UAAU,gBAAgB,CAAC;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,CAAC,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EAC7C;AAEA,SAAO;AAAA,IACL,UAAU,8BAAY,CAAC;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AGzfA,IAAAG,aAIO;AACP,IAAAC,gBAAgE;AAChE,IAAAC,cAAmB;AAyDZ,SAAS,cAAc;AAAA,
EAC5B,MAAM;AAAA,EACN;AAAA,EACA,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA;AAAA,EACA,iBAAiB;AAAA,EACjB,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA,uBAAuB;AACzB,IAMI,CAAC,GAAyB;AAE5B,QAAM,aAAS,qBAAM;AACrB,QAAM,eAAe,MAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,QAAI,YAAAC,SAAe,CAAC,KAAK,YAAY,GAAG,MAAM;AAAA,IACjE,cAAc;AAAA,EAChB,CAAC;AAED,QAAM,EAAE,MAAM,YAAY,OAAO,QAAQ,cAAc,QAAI,YAAAA;AAAA,IACzD,CAAC,cAAc,SAAS;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAA4B,MAAS;AAC/D,QAAM,aAAa;AAGnB,QAAM,CAAC,iBAAiB,kBAAkB,QACxC,wBAAiC,IAAI;AAEvC,QAAM,uBAAmB,sBAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,+BAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,qBAAiB;AAAA,IACrB,OAAO,QAAgB,gBACrB,8BAAkB;AAAA,MAChB;AAAA,MACA;AAAA,MACA,aAAa,iBAAiB,QAAQ;AAAA,MACtC,SAAS,EAAE,GAAG,iBAAiB,QAAQ,SAAS,GAAG,mCAAS,QAAQ;AAAA,MACpE,MAAM;AAAA,QACJ,GAAG,iBAAiB,QAAQ;AAAA,QAC5B,GAAG,mCAAS;AAAA,MACd;AAAA,MACA;AAAA,MACA,OAAAD;AAAA;AAAA,MAEA,eAAe;AAAA,QACb,CAACE,gBAAuB,OAAOA,aAAY,KAAK;AAAA,QAChD;AAAA,MACF;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,IACH;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAO,2BAAY,MAAM;AAC7B,QAAI,iBAAiB;AACnB,sBAAgB,MAAM;AACtB,yBAAmB,IAAI;AAAA,IACzB;AAAA,EACF,GAAG,CAAC,eAAe,CAAC;AAEpB,QAAM,oBAAgB;AAAA,IACpB,CAACE,gBAAuB;AACtB,aAAOA,aAAY,KAAK;AAAA,IAC1B;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAEA,QAAM,eAAW;AAAA,IACf,OAAO,QAAQ,YAAY;AACzB,aAAO,eAAe,QAAQ,OAAO;AAAA,IACvC;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAAS,YAAY;AAE/C,QAAM,mBAAe;AAAA,IACnB,CAAC,UAA4C;AArLjD;AAsLM,2CAAO,mBAAP;AACA,aAAO,QAAQ,SAAS,KAAK,IAAI;AAAA,IACnC;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,wBAAoB;AAAA,IACxB,CAAC,MAAW;AACV,eAAS,EAAE,OAAO,KAAK;AAAA,IACzB;AAAA,IACA,CAAC,QAAQ;AAAA,EACX;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC/MA,4BAIO;AACP,IAAAC,aAMO;AACP,IAAAC,gBAAqD;AACrD,IAAAC,cAAmB;AAInB,IAAM,mBAAmB,MAAM;AA4F/B,SAAS,UAA+B;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AAEA,QAAM,aAAS,qBAAM;AACrB,QAAM,eAAe,kBAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,QAAI,YAAAC;AAAA,IACvB,CAAC,KAAK,YAAY;AAAA,IAClB;AAAA,IACA,EAAE,cAAc,aAAa;AAAA,EAC/B;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,wBAA4B,MAAS;AAC/D,QAAM,CAAC,WAAW,YAAY,QAAI,wBAAS,KAAK;AAGhD,QAAM,yBAAqB,sBAA+B,IAAI;AAE9D,QAAM,WAAO,2BAAY,MAAM;AA5IjC;AA6II,QAAI;AACF,+BAAmB,YAAnB,mBAA4B;AAAA,IAC9B,SAAS,SAAS;AAAA,IAClB,UAAE;AACA,mBAAa,KAAK;AAClB,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,SAAS,OAAO,UAAiB;AAtJzC;AAuJI,QAAI;AACF,aAAO,MAAS;AAChB,mBAAa,IAAI;AACjB,eAAS,MAAS;AAElB,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAmB,UAAU;AAE7B,YAAM,cAAcD,UAAA,OAAAA,SAAS,iBAAiB;AAC9C,YAAM,WAAW,MAAM,YAAY,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ,gBAAgB;AAAA,QACxB,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI;AAAA,WACP,WAAM,SAAS,KAAK,MAApB,YAA0B;AAAA,QAC7B;AAAA,MACF;AAEA,UAAI,SAAS,QAAQ,MAAM;AACzB,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAEA,UAAI,kBAAkB;AACtB,UAAI,eAAgD;AAEpD,YAAM,SAAS,KAAK,YAAY,IAAI,kBAAkB,CAAC,EAAE;AAAA,QACvD,IAAI,eAAuB;AAAA,UACzB,MAAM,MAAM,OAAO;AACjB,+BAAmB;AAEnB,kBAAM,EAAE,MAAM,IAAI,UAAM,6BAAiB,eAAe;AACxD,kBAAM,gBAAgB;AAEtB,gBAAI,KAAC,4BAAgB,cAAc,aAAa,GAAG;AACjD,6BAAe;AAEf,qBAAO,aAAa;AAAA,YACtB;AAAA,UACF;AAAA,UAEA,MAAM,QAAQ;AACZ,yBAAa,KAAK;AAClB,+BAAmB,UAAU;AAE7B,gBAAI,
YAAY,MAAM;AACpB,oBAAM,mBAAmB,UAAM,yCAAkB;AAAA,gBAC/C,OAAO;AAAA,gBACP,YAAQ,qBAAS,MAAM;AAAA,cACzB,CAAC;AAED;AAAA,gBACE,iBAAiB,UACb,EAAE,QAAQ,iBAAiB,OAAO,OAAO,OAAU,IACnD,EAAE,QAAQ,QAAW,OAAO,iBAAiB,MAAM;AAAA,cACzD;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAASE,QAAO;AACd,cAAI,oCAAaA,MAAK,GAAG;AACvB;AAAA,MACF;AAEA,UAAI,WAAWA,kBAAiB,OAAO;AACrC,gBAAQA,MAAK;AAAA,MACf;AAEA,mBAAa,KAAK;AAClB,eAASA,kBAAiB,QAAQA,SAAQ,IAAI,MAAM,OAAOA,MAAK,CAAC,CAAC;AAAA,IACpE;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAM,yBAAyB;","names":["import_ai","import_react","throttleFunction","generateIdFunc","fetch","useSWR","messages","headers","body","import_ai","import_react","import_swr","fetch","useSWR","completion","import_ai","import_react","import_swr","fetch","useSWR","error"]}
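The bundled `use-chat.ts` source embedded in the map above shows the reshaped hook surface: progress is reported only through `status` (`'submitted' | 'streaming' | 'ready' | 'error'`), `handleSubmit` skips submission when there is neither text input nor file parts, and `append` takes a `CreateUIMessage` plus optional request options. A minimal consumer sketch against that surface — the `/api/chat` route and the `ChatPanel` name are illustrative assumptions, not part of the package:

```tsx
// Sketch of a consumer of the hook surface shown above; not from the package docs.
// Assumes an /api/chat route; `ChatPanel` is a hypothetical component name.
import { useChat } from '@ai-sdk/react';

export function ChatPanel() {
  const { messages, input, handleInputChange, handleSubmit, status, error } =
    useChat({ api: '/api/chat' });

  // No `isLoading` flag is returned; gate the UI on `status` instead.
  const isBusy = status === 'submitted' || status === 'streaming';

  return (
    <form onSubmit={handleSubmit}>
      {messages.map(message => (
        <div key={message.id}>
          {message.role}:{' '}
          {message.parts.map((part, index) =>
            part.type === 'text' ? <span key={index}>{part.text}</span> : null,
          )}
        </div>
      ))}
      {error && <p>{error.message}</p>}
      {/* handleSubmit is a no-op when the input is empty and no files are attached. */}
      <input value={input} onChange={handleInputChange} disabled={isBusy} />
    </form>
  );
}
```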
package/dist/index.mjs
CHANGED
@@ -40,8 +40,7 @@ function useChat({
   onToolCall,
   experimental_prepareRequestBody,
   maxSteps = 1,
-  streamProtocol = "
-  onResponse,
+  streamProtocol = "ui-message",
   onFinish,
   onError,
   credentials,
@@ -104,12 +103,10 @@ function useChat({
   body: (_a = experimental_prepareRequestBody == null ? void 0 : experimental_prepareRequestBody({
   id: chatId,
   messages: chatMessages,
-  requestData: chatRequest.data,
   requestBody: chatRequest.body
   })) != null ? _a : {
   id: chatId,
   messages: chatMessages,
-  data: chatRequest.data,
   ...extraMetadataRef.current.body,
   ...chatRequest.body
   },
@@ -120,7 +117,6 @@ function useChat({
   ...chatRequest.headers
   },
   abortController: () => abortControllerRef.current,
-  onResponse,
   onUpdate({ message }) {
   mutateStatus("streaming");
   const replaceLastMessage = message.id === chatMessages[chatMessages.length - 1].id;
@@ -169,7 +165,6 @@ function useChat({
   mutateStatus,
   api,
   extraMetadataRef,
-  onResponse,
   onFinish,
   onError,
   setError,
@@ -187,22 +182,21 @@ function useChat({
   ]
   );
   const append = useCallback(
-  (message, {
+  async (message, { headers: headers2, body: body2 } = {}) => {
   var _a;
-
+  await triggerRequest({
   messages: messagesRef.current.concat({
   ...message,
   id: (_a = message.id) != null ? _a : generateId()
   }),
   headers: headers2,
-  body: body2
-  data
+  body: body2
   });
   },
   [triggerRequest, generateId]
   );
   const reload = useCallback(
-  async ({
+  async ({ headers: headers2, body: body2 } = {}) => {
   const messages2 = messagesRef.current;
   if (messages2.length === 0) {
   return null;
@@ -211,8 +205,7 @@ function useChat({
   return triggerRequest({
   messages: lastMessage.role === "assistant" ? messages2.slice(0, -1) : messages2,
   headers: headers2,
-  body: body2
-  data
+  body: body2
   });
   },
   [triggerRequest]
@@ -242,7 +235,8 @@ function useChat({
   async (event, options = {}, metadata) => {
   var _a;
   (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
-
+  const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await convertFileListToFileUIParts(options == null ? void 0 : options.files);
+  if (!input && fileParts.length === 0)
   return;
   if (metadata) {
   extraMetadataRef.current = {
@@ -250,7 +244,6 @@ function useChat({
   ...metadata
   };
   }
-  const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await convertFileListToFileUIParts(options == null ? void 0 : options.files);
   triggerRequest({
   messages: messagesRef.current.concat({
   id: generateId(),
@@ -259,8 +252,7 @@ function useChat({
   parts: [...fileParts, { type: "text", text: input }]
   }),
   headers: options.headers,
-  body: options.body
-  data: options.data
+  body: options.body
   });
   setInput("");
   },
@@ -312,7 +304,6 @@ function useChat({
   setInput,
   handleInputChange,
   handleSubmit,
-  isLoading: status === "submitted" || status === "streaming",
   status,
   addToolResult
   };
@@ -334,7 +325,6 @@ function useCompletion({
   body,
   streamProtocol = "data",
   fetch: fetch2,
-  onResponse,
   onFinish,
   onError,
   experimental_throttle: throttleWaitMs
@@ -383,7 +373,6 @@ function useCompletion({
   setLoading: mutateLoading,
   setError,
   setAbortController,
-  onResponse,
   onFinish,
   onError
   }),
@@ -393,7 +382,6 @@ function useCompletion({
   api,
   extraMetadataRef,
   setAbortController,
-  onResponse,
   onFinish,
   onError,
   setError,
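The compiled changes above remove `onResponse` (from both `useChat` and `useCompletion`), the `data` request option, and the derived `isLoading` field returned by `useChat`, and they switch the default `streamProtocol` to `"ui-message"`. A hedged sketch of how a call site might adapt — the `/api/chat` route and the `sessionId` field are assumptions for illustration, not part of the package:

```tsx
// Migration-flavoured sketch, not the package's documented upgrade guide.
// Extra per-request values can travel via `body` (merged into the request
// payload), and progress now comes from `status` rather than `isLoading`.
import { useChat } from '@ai-sdk/react';

export function SendHello() {
  const { append, status } = useChat({ api: '/api/chat' });

  return (
    <button
      disabled={status !== 'ready'}
      onClick={() =>
        append(
          // `metadata: undefined` mirrors what the compiled handleSubmit sends.
          { role: 'user', metadata: undefined, parts: [{ type: 'text', text: 'Hello' }] },
          // previously passed as `data: { sessionId: 'abc123' }`
          { body: { sessionId: 'abc123' } },
        )
      }
    >
      Send
    </button>
  );
}
```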
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/use-chat.ts","../src/throttle.ts","../src/util/use-stable-value.ts","../src/use-completion.ts","../src/use-object.ts"],"sourcesContent":["import type {\n ChatRequestOptions,\n CreateUIMessage,\n FileUIPart,\n JSONValue,\n UIMessage,\n UseChatOptions,\n} from 'ai';\nimport {\n callChatApi,\n convertFileListToFileUIParts,\n extractMaxToolInvocationStep,\n generateId as generateIdFunc,\n getToolInvocations,\n isAssistantMessageWithCompletedToolCalls,\n shouldResubmitMessages,\n updateToolCallResult,\n} from 'ai';\nimport { useCallback, useEffect, useMemo, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\nimport { useStableValue } from './util/use-stable-value';\n\nexport type { CreateUIMessage, UIMessage, UseChatOptions };\n\nexport type UseChatHelpers<MESSAGE_METADATA = unknown> = {\n /** Current messages in the chat */\n messages: UIMessage<MESSAGE_METADATA>[];\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Append a user message to the chat list. This triggers the API call to fetch\n * the assistant's response.\n * @param message The message to append\n * @param options Additional options to pass to the API call\n */\n append: (\n message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>,\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n /**\n * Reload the last AI chat response for the given chat history. If the last\n * message isn't from the assistant, it will request the API to generate a\n * new response.\n */\n reload: (\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n /**\n * Abort the current request immediately, keep the generated tokens if any.\n */\n stop: () => void;\n\n /**\n * Resume an ongoing chat generation stream. This does not resume an aborted generation.\n */\n experimental_resume: () => void;\n\n /**\n * Update the `messages` state locally. 
This is useful when you want to\n * edit the messages on the client, and then trigger the `reload` method\n * manually to regenerate the AI response.\n */\n setMessages: (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /** An input/textarea-ready onChange handler to control the value of the input */\n handleInputChange: (\n e:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n /** Form submission handler to automatically reset input and append a user message */\n handleSubmit: (\n event?: { preventDefault?: () => void },\n chatRequestOptions?: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n },\n ) => void;\n metadata?: Object;\n\n /**\n * Whether the API request is in progress\n *\n * @deprecated use `status` instead\n */\n isLoading: boolean;\n\n /**\n * Hook status:\n *\n * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.\n * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.\n * - `ready`: The full response has been received and processed; a new user message can be submitted.\n * - `error`: An error occurred during the API request, preventing successful completion.\n */\n status: 'submitted' | 'streaming' | 'ready' | 'error';\n\n addToolResult: ({\n toolCallId,\n result,\n }: {\n toolCallId: string;\n result: any;\n }) => void;\n\n /** The id of the chat */\n id: string;\n};\n\nexport function useChat<MESSAGE_METADATA>({\n api = '/api/chat',\n id,\n initialMessages,\n initialInput = '',\n onToolCall,\n experimental_prepareRequestBody,\n maxSteps = 1,\n streamProtocol = 'data',\n onResponse,\n onFinish,\n onError,\n credentials,\n headers,\n body,\n generateId = generateIdFunc,\n fetch,\n experimental_throttle: throttleWaitMs,\n messageMetadataSchema,\n}: UseChatOptions<MESSAGE_METADATA> & {\n /**\n * Experimental (React only). When a function is provided, it will be used\n * to prepare the request body for the chat API. This can be useful for\n * customizing the request body based on the messages and data in the chat.\n *\n * @param messages The current messages in the chat.\n * @param requestData The data object passed in the chat request.\n * @param requestBody The request body object passed in the chat request.\n */\n experimental_prepareRequestBody?: (options: {\n id: string;\n messages: UIMessage[];\n requestData?: JSONValue;\n requestBody?: object;\n }) => unknown;\n\n /**\nCustom throttle wait in ms for the chat messages and data updates.\nDefault is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseChatHelpers<MESSAGE_METADATA> {\n // Generate ID once, store in state for stability across re-renders\n const [hookId] = useState(generateId);\n\n // Use the caller-supplied ID if available; otherwise, fall back to our stable ID\n const chatId = id ?? hookId;\n const chatKey = typeof api === 'string' ? [api, chatId] : chatId;\n\n // Store array of the processed initial messages to avoid re-renders:\n const stableInitialMessages = useStableValue(initialMessages ?? 
[]);\n const processedInitialMessages = useMemo(\n () => stableInitialMessages,\n [stableInitialMessages],\n );\n\n // Store the chat state in SWR, using the chatId as the key to share states.\n const { data: messages, mutate } = useSWR<UIMessage<MESSAGE_METADATA>[]>(\n [chatKey, 'messages'],\n null,\n { fallbackData: processedInitialMessages },\n );\n\n // Keep the latest messages in a ref.\n const messagesRef = useRef<UIMessage<MESSAGE_METADATA>[]>(messages || []);\n useEffect(() => {\n messagesRef.current = messages || [];\n }, [messages]);\n\n const { data: status = 'ready', mutate: mutateStatus } = useSWR<\n 'submitted' | 'streaming' | 'ready' | 'error'\n >([chatKey, 'status'], null);\n\n const { data: error = undefined, mutate: setError } = useSWR<\n undefined | Error\n >([chatKey, 'error'], null);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (\n chatRequest: {\n headers?: Record<string, string> | Headers;\n body?: object;\n messages: UIMessage<MESSAGE_METADATA>[];\n data?: JSONValue;\n },\n requestType: 'generate' | 'resume' = 'generate',\n ) => {\n mutateStatus('submitted');\n setError(undefined);\n\n const chatMessages = chatRequest.messages;\n\n const messageCount = chatMessages.length;\n const maxStep = extractMaxToolInvocationStep(\n getToolInvocations(chatMessages[chatMessages.length - 1]),\n );\n\n try {\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const throttledMutate = throttle(mutate, throttleWaitMs);\n\n // Do an optimistic update to show the updated messages immediately:\n throttledMutate(chatMessages, false);\n\n await callChatApi({\n api,\n body: experimental_prepareRequestBody?.({\n id: chatId,\n messages: chatMessages,\n requestData: chatRequest.data,\n requestBody: chatRequest.body,\n }) ?? {\n id: chatId,\n messages: chatMessages,\n data: chatRequest.data,\n ...extraMetadataRef.current.body,\n ...chatRequest.body,\n },\n streamProtocol,\n credentials: extraMetadataRef.current.credentials,\n headers: {\n ...extraMetadataRef.current.headers,\n ...chatRequest.headers,\n },\n abortController: () => abortControllerRef.current,\n onResponse,\n onUpdate({ message }) {\n mutateStatus('streaming');\n\n const replaceLastMessage =\n message.id === chatMessages[chatMessages.length - 1].id;\n\n throttledMutate(\n [\n ...(replaceLastMessage\n ? 
chatMessages.slice(0, chatMessages.length - 1)\n : chatMessages),\n message,\n ],\n false,\n );\n },\n onToolCall,\n onFinish,\n generateId,\n fetch,\n lastMessage: chatMessages[chatMessages.length - 1],\n requestType,\n messageMetadataSchema,\n });\n\n abortControllerRef.current = null;\n\n mutateStatus('ready');\n } catch (err) {\n // Ignore abort errors as they are expected.\n if ((err as any).name === 'AbortError') {\n abortControllerRef.current = null;\n mutateStatus('ready');\n return null;\n }\n\n if (onError && err instanceof Error) {\n onError(err);\n }\n\n setError(err as Error);\n mutateStatus('error');\n }\n\n // auto-submit when all tool calls in the last assistant message have results\n // and assistant has not answered yet\n const messages = messagesRef.current;\n if (\n shouldResubmitMessages({\n originalMaxToolInvocationStep: maxStep,\n originalMessageCount: messageCount,\n maxSteps,\n messages,\n })\n ) {\n await triggerRequest({ messages });\n }\n },\n [\n mutate,\n mutateStatus,\n api,\n extraMetadataRef,\n onResponse,\n onFinish,\n onError,\n setError,\n streamProtocol,\n experimental_prepareRequestBody,\n onToolCall,\n maxSteps,\n messagesRef,\n abortControllerRef,\n generateId,\n fetch,\n throttleWaitMs,\n chatId,\n messageMetadataSchema,\n ],\n );\n\n const append = useCallback(\n (\n message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>,\n { data, headers, body }: ChatRequestOptions = {},\n ) =>\n triggerRequest({\n messages: messagesRef.current.concat({\n ...message,\n id: message.id ?? generateId(),\n }),\n headers,\n body,\n data,\n }),\n [triggerRequest, generateId],\n );\n\n const reload = useCallback(\n async ({ data, headers, body }: ChatRequestOptions = {}) => {\n const messages = messagesRef.current;\n\n if (messages.length === 0) {\n return null;\n }\n\n // Remove last assistant message and retry last user message.\n const lastMessage = messages[messages.length - 1];\n return triggerRequest({\n messages:\n lastMessage.role === 'assistant' ? messages.slice(0, -1) : messages,\n headers,\n body,\n data,\n });\n },\n [triggerRequest],\n );\n\n const stop = useCallback(() => {\n if (abortControllerRef.current) {\n abortControllerRef.current.abort();\n abortControllerRef.current = null;\n }\n }, []);\n\n const experimental_resume = useCallback(async () => {\n const messages = messagesRef.current;\n\n triggerRequest({ messages }, 'resume');\n }, [triggerRequest]);\n\n const setMessages = useCallback(\n (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => {\n if (typeof messages === 'function') {\n messages = messages(messagesRef.current);\n }\n\n mutate(messages, false);\n messagesRef.current = messages;\n },\n [mutate],\n );\n\n // Input state and handlers.\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n async (\n event?: { preventDefault?: () => void },\n options: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n } = {},\n metadata?: Object,\n ) => {\n event?.preventDefault?.();\n\n if (!input && !options.allowEmptySubmit) return;\n\n if (metadata) {\n extraMetadataRef.current = {\n ...extraMetadataRef.current,\n ...metadata,\n };\n }\n\n const fileParts = Array.isArray(options?.files)\n ? 
options.files\n : await convertFileListToFileUIParts(options?.files);\n\n triggerRequest({\n messages: messagesRef.current.concat({\n id: generateId(),\n role: 'user',\n metadata: undefined,\n parts: [...fileParts, { type: 'text', text: input }],\n }),\n headers: options.headers,\n body: options.body,\n data: options.data,\n });\n\n setInput('');\n },\n [input, generateId, triggerRequest],\n );\n\n const handleInputChange = (e: any) => {\n setInput(e.target.value);\n };\n\n const addToolResult = useCallback(\n ({ toolCallId, result }: { toolCallId: string; result: unknown }) => {\n const currentMessages = messagesRef.current;\n\n updateToolCallResult({\n messages: currentMessages,\n toolCallId,\n toolResult: result,\n });\n\n // array mutation is required to trigger a re-render\n mutate(\n [\n ...currentMessages.slice(0, currentMessages.length - 1),\n {\n ...currentMessages[currentMessages.length - 1],\n // @ts-ignore\n // update the revisionId to trigger a re-render\n revisionId: generateId(),\n },\n ],\n false,\n );\n\n // when the request is ongoing, the auto-submit will be triggered after the request is finished\n if (status === 'submitted' || status === 'streaming') {\n return;\n }\n\n // auto-submit when all tool calls in the last assistant message have results:\n const lastMessage = currentMessages[currentMessages.length - 1];\n if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {\n triggerRequest({ messages: currentMessages });\n }\n },\n [mutate, status, triggerRequest, generateId],\n );\n\n return {\n messages: messages ?? [],\n id: chatId,\n setMessages,\n error,\n append,\n reload,\n stop,\n experimental_resume,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading: status === 'submitted' || status === 'streaming',\n status,\n addToolResult,\n };\n}\n","import throttleFunction from 'throttleit';\n\nexport function throttle<T extends (...args: any[]) => any>(\n fn: T,\n waitMs: number | undefined,\n): T {\n return waitMs != null ? 
throttleFunction(fn, waitMs) : fn;\n}\n","import { isDeepEqualData } from 'ai';\nimport { useEffect, useState } from 'react';\n\n/**\n * Returns a stable value that only updates the stored value (and triggers a re-render)\n * when the value's contents differ by deep-compare.\n */\nexport function useStableValue<T>(latestValue: T): T {\n const [value, setValue] = useState<T>(latestValue);\n\n useEffect(() => {\n if (!isDeepEqualData(latestValue, value)) {\n setValue(latestValue);\n }\n }, [latestValue, value]);\n\n return value;\n}\n","import {\n CompletionRequestOptions,\n UseCompletionOptions,\n callCompletionApi,\n} from 'ai';\nimport { useCallback, useEffect, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\n\nexport type { UseCompletionOptions };\n\nexport type UseCompletionHelpers = {\n /** The current completion result */\n completion: string;\n /**\n * Send a new prompt to the API endpoint and update the completion state.\n */\n complete: (\n prompt: string,\n options?: CompletionRequestOptions,\n ) => Promise<string | null | undefined>;\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Abort the current API request but keep the generated tokens.\n */\n stop: () => void;\n /**\n * Update the `completion` state locally.\n */\n setCompletion: (completion: string) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /**\n * An input/textarea-ready onChange handler to control the value of the input\n * @example\n * ```jsx\n * <input onChange={handleInputChange} value={input} />\n * ```\n */\n handleInputChange: (\n event:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /**\n * Form submission handler to automatically reset input and append a user message\n * @example\n * ```jsx\n * <form onSubmit={handleSubmit}>\n * <input onChange={handleInputChange} value={input} />\n * </form>\n * ```\n */\n handleSubmit: (event?: { preventDefault?: () => void }) => void;\n\n /** Whether the API request is in progress */\n isLoading: boolean;\n};\n\nexport function useCompletion({\n api = '/api/completion',\n id,\n initialCompletion = '',\n initialInput = '',\n credentials,\n headers,\n body,\n streamProtocol = 'data',\n fetch,\n onResponse,\n onFinish,\n onError,\n experimental_throttle: throttleWaitMs,\n}: UseCompletionOptions & {\n /**\n * Custom throttle wait in ms for the completion and data updates.\n * Default is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseCompletionHelpers {\n // Generate an unique id for the completion if not provided.\n const hookId = useId();\n const completionId = id || hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<string>([api, completionId], null, {\n fallbackData: initialCompletion,\n });\n\n const { data: isLoading = false, mutate: mutateLoading } = useSWR<boolean>(\n [completionId, 'loading'],\n null,\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const completion = data!;\n\n // Abort controller to cancel the current API call.\n const [abortController, setAbortController] =\n useState<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n 
extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (prompt: string, options?: CompletionRequestOptions) =>\n callCompletionApi({\n api,\n prompt,\n credentials: extraMetadataRef.current.credentials,\n headers: { ...extraMetadataRef.current.headers, ...options?.headers },\n body: {\n ...extraMetadataRef.current.body,\n ...options?.body,\n },\n streamProtocol,\n fetch,\n // throttle streamed ui updates:\n setCompletion: throttle(\n (completion: string) => mutate(completion, false),\n throttleWaitMs,\n ),\n setLoading: mutateLoading,\n setError,\n setAbortController,\n onResponse,\n onFinish,\n onError,\n }),\n [\n mutate,\n mutateLoading,\n api,\n extraMetadataRef,\n setAbortController,\n onResponse,\n onFinish,\n onError,\n setError,\n streamProtocol,\n fetch,\n throttleWaitMs,\n ],\n );\n\n const stop = useCallback(() => {\n if (abortController) {\n abortController.abort();\n setAbortController(null);\n }\n }, [abortController]);\n\n const setCompletion = useCallback(\n (completion: string) => {\n mutate(completion, false);\n },\n [mutate],\n );\n\n const complete = useCallback<UseCompletionHelpers['complete']>(\n async (prompt, options) => {\n return triggerRequest(prompt, options);\n },\n [triggerRequest],\n );\n\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n (event?: { preventDefault?: () => void }) => {\n event?.preventDefault?.();\n return input ? complete(input) : undefined;\n },\n [input, complete],\n );\n\n const handleInputChange = useCallback(\n (e: any) => {\n setInput(e.target.value);\n },\n [setInput],\n );\n\n return {\n completion,\n complete,\n error,\n setCompletion,\n stop,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading,\n };\n}\n","import {\n FetchFunction,\n isAbortError,\n safeValidateTypes,\n} from '@ai-sdk/provider-utils';\nimport {\n asSchema,\n DeepPartial,\n isDeepEqualData,\n parsePartialJson,\n Schema,\n} from 'ai';\nimport { useCallback, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport z from 'zod';\n\n// use function to allow for mocking in tests:\nconst getOriginalFetch = () => fetch;\n\nexport type Experimental_UseObjectOptions<RESULT> = {\n /**\n * The API endpoint. It should stream JSON that matches the schema as chunked text.\n */\n api: string;\n\n /**\n * A Zod schema that defines the shape of the complete object.\n */\n schema: z.Schema<RESULT, z.ZodTypeDef, any> | Schema<RESULT>;\n\n /**\n * An unique identifier. If not provided, a random one will be\n * generated. When provided, the `useObject` hook with the same `id` will\n * have shared states across components.\n */\n id?: string;\n\n /**\n * An optional value for the initial object.\n */\n initialValue?: DeepPartial<RESULT>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCallback that is called when the stream has finished.\n */\n onFinish?: (event: {\n /**\nThe generated object (typed according to the schema).\nCan be undefined if the final object does not match the schema.\n */\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. 
a TypeValidationError when the final object does not match the schema.\n */\n error: Error | undefined;\n }) => Promise<void> | void;\n\n /**\n * Callback function to be called when an error is encountered.\n */\n onError?: (error: Error) => void;\n\n /**\n * Additional HTTP headers to be included in the request.\n */\n headers?: Record<string, string> | Headers;\n\n /**\n * The credentials mode to be used for the fetch request.\n * Possible values are: 'omit', 'same-origin', 'include'.\n * Defaults to 'same-origin'.\n */\n credentials?: RequestCredentials;\n};\n\nexport type Experimental_UseObjectHelpers<RESULT, INPUT> = {\n /**\n * Calls the API with the provided input as JSON body.\n */\n submit: (input: INPUT) => void;\n\n /**\n * The current value for the generated object. Updated as the API streams JSON chunks.\n */\n object: DeepPartial<RESULT> | undefined;\n\n /**\n * The error object of the API request if any.\n */\n error: Error | undefined;\n\n /**\n * Flag that indicates whether an API request is in progress.\n */\n isLoading: boolean;\n\n /**\n * Abort the current request immediately, keep the current partial object if any.\n */\n stop: () => void;\n};\n\nfunction useObject<RESULT, INPUT = any>({\n api,\n id,\n schema, // required, in the future we will use it for validation\n initialValue,\n fetch,\n onError,\n onFinish,\n headers,\n credentials,\n}: Experimental_UseObjectOptions<RESULT>): Experimental_UseObjectHelpers<\n RESULT,\n INPUT\n> {\n // Generate an unique id if not provided.\n const hookId = useId();\n const completionId = id ?? hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<DeepPartial<RESULT>>(\n [api, completionId],\n null,\n { fallbackData: initialValue },\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const [isLoading, setIsLoading] = useState(false);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const stop = useCallback(() => {\n try {\n abortControllerRef.current?.abort();\n } catch (ignored) {\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n }, []);\n\n const submit = async (input: INPUT) => {\n try {\n mutate(undefined); // reset the data\n setIsLoading(true);\n setError(undefined);\n\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const actualFetch = fetch ?? getOriginalFetch();\n const response = await actualFetch(api, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n credentials,\n signal: abortController.signal,\n body: JSON.stringify(input),\n });\n\n if (!response.ok) {\n throw new Error(\n (await response.text()) ?? 
'Failed to fetch the response.',\n );\n }\n\n if (response.body == null) {\n throw new Error('The response body is empty.');\n }\n\n let accumulatedText = '';\n let latestObject: DeepPartial<RESULT> | undefined = undefined;\n\n await response.body.pipeThrough(new TextDecoderStream()).pipeTo(\n new WritableStream<string>({\n async write(chunk) {\n accumulatedText += chunk;\n\n const { value } = await parsePartialJson(accumulatedText);\n const currentObject = value as DeepPartial<RESULT>;\n\n if (!isDeepEqualData(latestObject, currentObject)) {\n latestObject = currentObject;\n\n mutate(currentObject);\n }\n },\n\n async close() {\n setIsLoading(false);\n abortControllerRef.current = null;\n\n if (onFinish != null) {\n const validationResult = await safeValidateTypes({\n value: latestObject,\n schema: asSchema(schema),\n });\n\n onFinish(\n validationResult.success\n ? { object: validationResult.value, error: undefined }\n : { object: undefined, error: validationResult.error },\n );\n }\n },\n }),\n );\n } catch (error) {\n if (isAbortError(error)) {\n return;\n }\n\n if (onError && error instanceof Error) {\n onError(error);\n }\n\n setIsLoading(false);\n setError(error instanceof Error ? error : new Error(String(error)));\n }\n };\n\n return {\n submit,\n object: data,\n error,\n isLoading,\n stop,\n };\n}\n\nexport const experimental_useObject = useObject;\n"],"mappings":";AAQA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,aAAa,aAAAA,YAAW,SAAS,QAAQ,YAAAC,iBAAgB;AAClE,OAAO,YAAY;;;ACnBnB,OAAO,sBAAsB;AAEtB,SAAS,SACd,IACA,QACG;AACH,SAAO,UAAU,OAAO,iBAAiB,IAAI,MAAM,IAAI;AACzD;;;ACPA,SAAS,uBAAuB;AAChC,SAAS,WAAW,gBAAgB;AAM7B,SAAS,eAAkB,aAAmB;AACnD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAY,WAAW;AAEjD,YAAU,MAAM;AACd,QAAI,CAAC,gBAAgB,aAAa,KAAK,GAAG;AACxC,eAAS,WAAW;AAAA,IACtB;AAAA,EACF,GAAG,CAAC,aAAa,KAAK,CAAC;AAEvB,SAAO;AACT;;;AFqGO,SAAS,QAA0B;AAAA,EACxC,MAAM;AAAA,EACN;AAAA,EACA;AAAA,EACA,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EACb,OAAAC;AAAA,EACA,uBAAuB;AAAA,EACvB;AACF,IAsBI,CAAC,GAAqC;AAExC,QAAM,CAAC,MAAM,IAAIC,UAAS,UAAU;AAGpC,QAAM,SAAS,kBAAM;AACrB,QAAM,UAAU,OAAO,QAAQ,WAAW,CAAC,KAAK,MAAM,IAAI;AAG1D,QAAM,wBAAwB,eAAe,4CAAmB,CAAC,CAAC;AAClE,QAAM,2BAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,CAAC,qBAAqB;AAAA,EACxB;AAGA,QAAM,EAAE,MAAM,UAAU,OAAO,IAAI;AAAA,IACjC,CAAC,SAAS,UAAU;AAAA,IACpB;AAAA,IACA,EAAE,cAAc,yBAAyB;AAAA,EAC3C;AAGA,QAAM,cAAc,OAAsC,YAAY,CAAC,CAAC;AACxE,EAAAC,WAAU,MAAM;AACd,gBAAY,UAAU,YAAY,CAAC;AAAA,EACrC,GAAG,CAAC,QAAQ,CAAC;AAEb,QAAM,EAAE,MAAM,SAAS,SAAS,QAAQ,aAAa,IAAI,OAEvD,CAAC,SAAS,QAAQ,GAAG,IAAI;AAE3B,QAAM,EAAE,MAAM,QAAQ,QAAW,QAAQ,SAAS,IAAI,OAEpD,CAAC,SAAS,OAAO,GAAG,IAAI;AAG1B,QAAM,qBAAqB,OAA+B,IAAI;AAE9D,QAAM,mBAAmB,OAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,EAAAA,WAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,iBAAiB;AAAA,IACrB,OACE,aAMA,cAAqC,eAClC;AA7NT;AA8NM,mBAAa,WAAW;AACxB,eAAS,MAAS;AAElB,YAAM,eAAe,YAAY;AAEjC,YAAM,eAAe,aAAa;AAClC,YAAM,UAAU;AAAA,QACd,mBAAmB,aAAa,aAAa,SAAS,CAAC,CAAC;AAAA,MAC1D;AAEA,UAAI;AACF,cAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BAAmB,UAAU;AAE7B,cAAM,kBAAkB,SAAS,QAAQ,cAAc;AAGvD,wBAAgB,cAAc,KAAK;AAEnC,cAAM,YAAY;AAAA,UAChB;AAAA,UACA,OAAM,wFAAkC;AAAA,YACtC,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,aAAa,YAAY;AAAA,YACzB,aAAa,YAAY;AAAA,UAC3B,OALM,YAKA;AAAA,YACJ,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,MAAM,YAAY;AAAA,YAClB,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA;AAAA,UACA
,aAAa,iBAAiB,QAAQ;AAAA,UACtC,SAAS;AAAA,YACP,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA,iBAAiB,MAAM,mBAAmB;AAAA,UAC1C;AAAA,UACA,SAAS,EAAE,QAAQ,GAAG;AACpB,yBAAa,WAAW;AAExB,kBAAM,qBACJ,QAAQ,OAAO,aAAa,aAAa,SAAS,CAAC,EAAE;AAEvD;AAAA,cACE;AAAA,gBACE,GAAI,qBACA,aAAa,MAAM,GAAG,aAAa,SAAS,CAAC,IAC7C;AAAA,gBACJ;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,OAAAF;AAAA,UACA,aAAa,aAAa,aAAa,SAAS,CAAC;AAAA,UACjD;AAAA,UACA;AAAA,QACF,CAAC;AAED,2BAAmB,UAAU;AAE7B,qBAAa,OAAO;AAAA,MACtB,SAAS,KAAK;AAEZ,YAAK,IAAY,SAAS,cAAc;AACtC,6BAAmB,UAAU;AAC7B,uBAAa,OAAO;AACpB,iBAAO;AAAA,QACT;AAEA,YAAI,WAAW,eAAe,OAAO;AACnC,kBAAQ,GAAG;AAAA,QACb;AAEA,iBAAS,GAAY;AACrB,qBAAa,OAAO;AAAA,MACtB;AAIA,YAAMG,YAAW,YAAY;AAC7B,UACE,uBAAuB;AAAA,QACrB,+BAA+B;AAAA,QAC/B,sBAAsB;AAAA,QACtB;AAAA,QACA,UAAAA;AAAA,MACF,CAAC,GACD;AACA,cAAM,eAAe,EAAE,UAAAA,UAAS,CAAC;AAAA,MACnC;AAAA,IACF;AAAA,IACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAH;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,CACE,SACA,EAAE,MAAM,SAAAI,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAC/C;AA5VN;AA6VM,4BAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,GAAG;AAAA,UACH,KAAI,aAAQ,OAAR,YAAc,WAAW;AAAA,QAC/B,CAAC;AAAA,QACD,SAAAD;AAAA,QACA,MAAAC;AAAA,QACA;AAAA,MACF,CAAC;AAAA;AAAA,IACH,CAAC,gBAAgB,UAAU;AAAA,EAC7B;AAEA,QAAM,SAAS;AAAA,IACb,OAAO,EAAE,MAAM,SAAAD,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAAM;AAC1D,YAAMF,YAAW,YAAY;AAE7B,UAAIA,UAAS,WAAW,GAAG;AACzB,eAAO;AAAA,MACT;AAGA,YAAM,cAAcA,UAASA,UAAS,SAAS,CAAC;AAChD,aAAO,eAAe;AAAA,QACpB,UACE,YAAY,SAAS,cAAcA,UAAS,MAAM,GAAG,EAAE,IAAIA;AAAA,QAC7D,SAAAC;AAAA,QACA,MAAAC;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,OAAO,YAAY,MAAM;AAC7B,QAAI,mBAAmB,SAAS;AAC9B,yBAAmB,QAAQ,MAAM;AACjC,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,sBAAsB,YAAY,YAAY;AAClD,UAAMF,YAAW,YAAY;AAE7B,mBAAe,EAAE,UAAAA,UAAS,GAAG,QAAQ;AAAA,EACvC,GAAG,CAAC,cAAc,CAAC;AAEnB,QAAM,cAAc;AAAA,IAClB,CACEA,cAKG;AACH,UAAI,OAAOA,cAAa,YAAY;AAClC,QAAAA,YAAWA,UAAS,YAAY,OAAO;AAAA,MACzC;AAEA,aAAOA,WAAU,KAAK;AACtB,kBAAY,UAAUA;AAAA,IACxB;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAGA,QAAM,CAAC,OAAO,QAAQ,IAAIF,UAAS,YAAY;AAE/C,QAAM,eAAe;AAAA,IACnB,OACE,OACA,UAEI,CAAC,GACL,aACG;AAvaT;AAwaM,2CAAO,mBAAP;AAEA,UAAI,CAAC,SAAS,CAAC,QAAQ;AAAkB;AAEzC,UAAI,UAAU;AACZ,yBAAiB,UAAU;AAAA,UACzB,GAAG,iBAAiB;AAAA,UACpB,GAAG;AAAA,QACL;AAAA,MACF;AAEA,YAAM,YAAY,MAAM,QAAQ,mCAAS,KAAK,IAC1C,QAAQ,QACR,MAAM,6BAA6B,mCAAS,KAAK;AAErD,qBAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,UACV,OAAO,CAAC,GAAG,WAAW,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,QACrD,CAAC;AAAA,QACD,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,QACd,MAAM,QAAQ;AAAA,MAChB,CAAC;AAED,eAAS,EAAE;AAAA,IACb;AAAA,IACA,CAAC,OAAO,YAAY,cAAc;AAAA,EACpC;AAEA,QAAM,oBAAoB,CAAC,MAAW;AACpC,aAAS,EAAE,OAAO,KAAK;AAAA,EACzB;AAEA,QAAM,gBAAgB;AAAA,IACpB,CAAC,EAAE,YAAY,OAAO,MAA+C;AACnE,YAAM,kBAAkB,YAAY;AAEpC,2BAAqB;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAGD;AAAA,QACE;AAAA,UACE,GAAG,gBAAgB,MAAM,GAAG,gBAAgB,SAAS,CAAC;AAAA,UACtD;AAAA,YACE,GAAG,gBAAgB,gBAAgB,SAAS,CAAC;AAAA;AAAA;AAAA,YAG7C,YAAY,WAAW;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAGA,UAAI,WAAW,eAAe,WAAW,aAAa;AACpD;AAAA,MACF;AAGA,YAAM,cAAc,gBAAgB,gBAAgB,SAAS,CAAC;AAC9D,UAAI,yCAAyC,WAAW,GAAG;AACzD,uBAAe,EAAE,UAAU,gBAAgB,CAAC;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,CAAC,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EAC7C;AAEA,SAAO;AAAA,IACL,UAAU,8BAAY,CAAC;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAC
A;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,WAAW,eAAe,WAAW;AAAA,IAChD;AAAA,IACA;AAAA,EACF;AACF;;;AGngBA;AAAA,EAGE;AAAA,OACK;AACP,SAAS,eAAAK,cAAa,aAAAC,YAAW,OAAO,UAAAC,SAAQ,YAAAC,iBAAgB;AAChE,OAAOC,aAAY;AAyDZ,SAAS,cAAc;AAAA,EAC5B,MAAM;AAAA,EACN;AAAA,EACA,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA;AAAA,EACA,iBAAiB;AAAA,EACjB,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,uBAAuB;AACzB,IAMI,CAAC,GAAyB;AAE5B,QAAM,SAAS,MAAM;AACrB,QAAM,eAAe,MAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,IAAIC,QAAe,CAAC,KAAK,YAAY,GAAG,MAAM;AAAA,IACjE,cAAc;AAAA,EAChB,CAAC;AAED,QAAM,EAAE,MAAM,YAAY,OAAO,QAAQ,cAAc,IAAIA;AAAA,IACzD,CAAC,cAAc,SAAS;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAIC,UAA4B,MAAS;AAC/D,QAAM,aAAa;AAGnB,QAAM,CAAC,iBAAiB,kBAAkB,IACxCA,UAAiC,IAAI;AAEvC,QAAM,mBAAmBC,QAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,EAAAC,WAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,iBAAiBC;AAAA,IACrB,OAAO,QAAgB,YACrB,kBAAkB;AAAA,MAChB;AAAA,MACA;AAAA,MACA,aAAa,iBAAiB,QAAQ;AAAA,MACtC,SAAS,EAAE,GAAG,iBAAiB,QAAQ,SAAS,GAAG,mCAAS,QAAQ;AAAA,MACpE,MAAM;AAAA,QACJ,GAAG,iBAAiB,QAAQ;AAAA,QAC5B,GAAG,mCAAS;AAAA,MACd;AAAA,MACA;AAAA,MACA,OAAAL;AAAA;AAAA,MAEA,eAAe;AAAA,QACb,CAACM,gBAAuB,OAAOA,aAAY,KAAK;AAAA,QAChD;AAAA,MACF;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,IACH;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAN;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAOK,aAAY,MAAM;AAC7B,QAAI,iBAAiB;AACnB,sBAAgB,MAAM;AACtB,yBAAmB,IAAI;AAAA,IACzB;AAAA,EACF,GAAG,CAAC,eAAe,CAAC;AAEpB,QAAM,gBAAgBA;AAAA,IACpB,CAACC,gBAAuB;AACtB,aAAOA,aAAY,KAAK;AAAA,IAC1B;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAEA,QAAM,WAAWD;AAAA,IACf,OAAO,QAAQ,YAAY;AACzB,aAAO,eAAe,QAAQ,OAAO;AAAA,IACvC;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAIH,UAAS,YAAY;AAE/C,QAAM,eAAeG;AAAA,IACnB,CAAC,UAA4C;AAxLjD;AAyLM,2CAAO,mBAAP;AACA,aAAO,QAAQ,SAAS,KAAK,IAAI;AAAA,IACnC;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,oBAAoBA;AAAA,IACxB,CAAC,MAAW;AACV,eAAS,EAAE,OAAO,KAAK;AAAA,IACzB;AAAA,IACA,CAAC,QAAQ;AAAA,EACX;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AClNA;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EAEA,mBAAAE;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAAC,cAAa,SAAAC,QAAO,UAAAC,SAAQ,YAAAC,iBAAgB;AACrD,OAAOC,aAAY;AAInB,IAAM,mBAAmB,MAAM;AA4F/B,SAAS,UAA+B;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AAEA,QAAM,SAASJ,OAAM;AACrB,QAAM,eAAe,kBAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,IAAIG;AAAA,IACvB,CAAC,KAAK,YAAY;AAAA,IAClB;AAAA,IACA,EAAE,cAAc,aAAa;AAAA,EAC/B;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAID,UAA4B,MAAS;AAC/D,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAS,KAAK;AAGhD,QAAM,qBAAqBD,QAA+B,IAAI;AAE9D,QAAM,OAAOF,aAAY,MAAM;AA5IjC;AA6II,QAAI;AACF,+BAAmB,YAAnB,mBAA4B;AAAA,IAC9B,SAAS,SAAS;AAAA,IAClB,UAAE;AACA,mBAAa,KAAK;AAClB,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,SAAS,OAAO,UAAiB;AAtJzC;AAuJI,QAAI;AACF,aAAO,MAAS;AAChB,mBAAa,IAAI;AACjB,eAAS,MAAS;AAElB,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAmB,UAAU;AAE7B,YAAM,cAAcK,UAAA,OAAAA,SAAS,iBAAiB;AAC9C,YAAM,WAAW,MAAM,YAAY,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ,gBAAgB;AAAA,QACxB,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI;AAAA,WACP,WAAM,SAAS,KAAK,MAApB,YAA0B;AAAA,QAC7B;AAAA,MACF;AAEA,UAAI,SAAS,QAAQ,MAAM;AACzB,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAEA,
UAAI,kBAAkB;AACtB,UAAI,eAAgD;AAEpD,YAAM,SAAS,KAAK,YAAY,IAAI,kBAAkB,CAAC,EAAE;AAAA,QACvD,IAAI,eAAuB;AAAA,UACzB,MAAM,MAAM,OAAO;AACjB,+BAAmB;AAEnB,kBAAM,EAAE,MAAM,IAAI,MAAM,iBAAiB,eAAe;AACxD,kBAAM,gBAAgB;AAEtB,gBAAI,CAACN,iBAAgB,cAAc,aAAa,GAAG;AACjD,6BAAe;AAEf,qBAAO,aAAa;AAAA,YACtB;AAAA,UACF;AAAA,UAEA,MAAM,QAAQ;AACZ,yBAAa,KAAK;AAClB,+BAAmB,UAAU;AAE7B,gBAAI,YAAY,MAAM;AACpB,oBAAM,mBAAmB,MAAM,kBAAkB;AAAA,gBAC/C,OAAO;AAAA,gBACP,QAAQ,SAAS,MAAM;AAAA,cACzB,CAAC;AAED;AAAA,gBACE,iBAAiB,UACb,EAAE,QAAQ,iBAAiB,OAAO,OAAO,OAAU,IACnD,EAAE,QAAQ,QAAW,OAAO,iBAAiB,MAAM;AAAA,cACzD;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAASO,QAAO;AACd,UAAI,aAAaA,MAAK,GAAG;AACvB;AAAA,MACF;AAEA,UAAI,WAAWA,kBAAiB,OAAO;AACrC,gBAAQA,MAAK;AAAA,MACf;AAEA,mBAAa,KAAK;AAClB,eAASA,kBAAiB,QAAQA,SAAQ,IAAI,MAAM,OAAOA,MAAK,CAAC,CAAC;AAAA,IACpE;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAM,yBAAyB;","names":["useEffect","useState","fetch","useState","useEffect","messages","headers","body","useCallback","useEffect","useRef","useState","useSWR","fetch","useSWR","useState","useRef","useEffect","useCallback","completion","isDeepEqualData","useCallback","useId","useRef","useState","useSWR","fetch","error"]}
|
|
1
|
+
{"version":3,"sources":["../src/use-chat.ts","../src/throttle.ts","../src/util/use-stable-value.ts","../src/use-completion.ts","../src/use-object.ts"],"sourcesContent":["import type {\n ChatRequestOptions,\n CreateUIMessage,\n FileUIPart,\n UIMessage,\n UseChatOptions,\n} from 'ai';\nimport {\n callChatApi,\n convertFileListToFileUIParts,\n extractMaxToolInvocationStep,\n generateId as generateIdFunc,\n getToolInvocations,\n isAssistantMessageWithCompletedToolCalls,\n shouldResubmitMessages,\n updateToolCallResult,\n} from 'ai';\nimport { useCallback, useEffect, useMemo, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\nimport { useStableValue } from './util/use-stable-value';\n\nexport type { CreateUIMessage, UIMessage, UseChatOptions };\n\nexport type UseChatHelpers<MESSAGE_METADATA = unknown> = {\n /**\n * The id of the chat.\n */\n readonly id: string;\n\n /**\n * Hook status:\n *\n * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.\n * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.\n * - `ready`: The full response has been received and processed; a new user message can be submitted.\n * - `error`: An error occurred during the API request, preventing successful completion.\n */\n readonly status: 'submitted' | 'streaming' | 'ready' | 'error';\n\n /** Current messages in the chat */\n readonly messages: UIMessage<MESSAGE_METADATA>[];\n\n /** The error object of the API request */\n readonly error: undefined | Error;\n\n /**\n * Append a user message to the chat list. This triggers the API call to fetch\n * the assistant's response.\n *\n * @param message The message to append\n * @param options Additional options to pass to the API call\n */\n append: (\n message: CreateUIMessage<MESSAGE_METADATA>,\n options?: ChatRequestOptions,\n ) => Promise<void>;\n\n /**\n * Reload the last AI chat response for the given chat history. If the last\n * message isn't from the assistant, it will request the API to generate a\n * new response.\n */\n reload: (\n chatRequestOptions?: ChatRequestOptions,\n ) => Promise<string | null | undefined>;\n\n /**\n * Abort the current request immediately, keep the generated tokens if any.\n */\n stop: () => void;\n\n /**\n * Resume an ongoing chat generation stream. This does not resume an aborted generation.\n */\n experimental_resume: () => void;\n\n /**\n * Update the `messages` state locally. 
This is useful when you want to\n * edit the messages on the client, and then trigger the `reload` method\n * manually to regenerate the AI response.\n */\n setMessages: (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => void;\n\n /** The current value of the input */\n input: string;\n\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n\n /** An input/textarea-ready onChange handler to control the value of the input */\n handleInputChange: (\n e:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /** Form submission handler to automatically reset input and append a user message */\n handleSubmit: (\n event?: { preventDefault?: () => void },\n chatRequestOptions?: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n },\n ) => void;\n\n addToolResult: ({\n toolCallId,\n result,\n }: {\n toolCallId: string;\n result: any;\n }) => void;\n};\n\nexport function useChat<MESSAGE_METADATA>({\n api = '/api/chat',\n id,\n initialMessages,\n initialInput = '',\n onToolCall,\n experimental_prepareRequestBody,\n maxSteps = 1,\n streamProtocol = 'ui-message',\n onFinish,\n onError,\n credentials,\n headers,\n body,\n generateId = generateIdFunc,\n fetch,\n experimental_throttle: throttleWaitMs,\n messageMetadataSchema,\n}: UseChatOptions<MESSAGE_METADATA> & {\n /**\n * Experimental (React only). When a function is provided, it will be used\n * to prepare the request body for the chat API. This can be useful for\n * customizing the request body based on the messages and data in the chat.\n *\n * @param id The id of the chat.\n * @param messages The current messages in the chat.\n * @param requestBody The request body object passed in the chat request.\n */\n experimental_prepareRequestBody?: (options: {\n id: string;\n messages: UIMessage<MESSAGE_METADATA>[];\n requestBody?: object;\n }) => unknown;\n\n /**\nCustom throttle wait in ms for the chat messages and data updates.\nDefault is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseChatHelpers<MESSAGE_METADATA> {\n // Generate ID once, store in state for stability across re-renders\n const [hookId] = useState(generateId);\n\n // Use the caller-supplied ID if available; otherwise, fall back to our stable ID\n const chatId = id ?? hookId;\n const chatKey = typeof api === 'string' ? [api, chatId] : chatId;\n\n // Store array of the processed initial messages to avoid re-renders:\n const stableInitialMessages = useStableValue(initialMessages ?? 
[]);\n const processedInitialMessages = useMemo(\n () => stableInitialMessages,\n [stableInitialMessages],\n );\n\n // Store the chat state in SWR, using the chatId as the key to share states.\n const { data: messages, mutate } = useSWR<UIMessage<MESSAGE_METADATA>[]>(\n [chatKey, 'messages'],\n null,\n { fallbackData: processedInitialMessages },\n );\n\n // Keep the latest messages in a ref.\n const messagesRef = useRef<UIMessage<MESSAGE_METADATA>[]>(messages || []);\n useEffect(() => {\n messagesRef.current = messages || [];\n }, [messages]);\n\n const { data: status = 'ready', mutate: mutateStatus } = useSWR<\n 'submitted' | 'streaming' | 'ready' | 'error'\n >([chatKey, 'status'], null);\n\n const { data: error = undefined, mutate: setError } = useSWR<\n undefined | Error\n >([chatKey, 'error'], null);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (\n chatRequest: ChatRequestOptions & {\n messages: UIMessage<MESSAGE_METADATA>[];\n },\n requestType: 'generate' | 'resume' = 'generate',\n ) => {\n mutateStatus('submitted');\n setError(undefined);\n\n const chatMessages = chatRequest.messages;\n\n const messageCount = chatMessages.length;\n const maxStep = extractMaxToolInvocationStep(\n getToolInvocations(chatMessages[chatMessages.length - 1]),\n );\n\n try {\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const throttledMutate = throttle(mutate, throttleWaitMs);\n\n // Do an optimistic update to show the updated messages immediately:\n throttledMutate(chatMessages, false);\n\n await callChatApi({\n api,\n body: experimental_prepareRequestBody?.({\n id: chatId,\n messages: chatMessages,\n requestBody: chatRequest.body,\n }) ?? {\n id: chatId,\n messages: chatMessages,\n ...extraMetadataRef.current.body,\n ...chatRequest.body,\n },\n streamProtocol,\n credentials: extraMetadataRef.current.credentials,\n headers: {\n ...extraMetadataRef.current.headers,\n ...chatRequest.headers,\n },\n abortController: () => abortControllerRef.current,\n onUpdate({ message }) {\n mutateStatus('streaming');\n\n const replaceLastMessage =\n message.id === chatMessages[chatMessages.length - 1].id;\n\n throttledMutate(\n [\n ...(replaceLastMessage\n ? 
chatMessages.slice(0, chatMessages.length - 1)\n : chatMessages),\n message,\n ],\n false,\n );\n },\n onToolCall,\n onFinish,\n generateId,\n fetch,\n lastMessage: chatMessages[chatMessages.length - 1],\n requestType,\n messageMetadataSchema,\n });\n\n abortControllerRef.current = null;\n\n mutateStatus('ready');\n } catch (err) {\n // Ignore abort errors as they are expected.\n if ((err as any).name === 'AbortError') {\n abortControllerRef.current = null;\n mutateStatus('ready');\n return null;\n }\n\n if (onError && err instanceof Error) {\n onError(err);\n }\n\n setError(err as Error);\n mutateStatus('error');\n }\n\n // auto-submit when all tool calls in the last assistant message have results\n // and assistant has not answered yet\n const messages = messagesRef.current;\n if (\n shouldResubmitMessages({\n originalMaxToolInvocationStep: maxStep,\n originalMessageCount: messageCount,\n maxSteps,\n messages,\n })\n ) {\n await triggerRequest({ messages });\n }\n },\n [\n mutate,\n mutateStatus,\n api,\n extraMetadataRef,\n onFinish,\n onError,\n setError,\n streamProtocol,\n experimental_prepareRequestBody,\n onToolCall,\n maxSteps,\n messagesRef,\n abortControllerRef,\n generateId,\n fetch,\n throttleWaitMs,\n chatId,\n messageMetadataSchema,\n ],\n );\n\n const append = useCallback(\n async (\n message: CreateUIMessage<MESSAGE_METADATA>,\n { headers, body }: ChatRequestOptions = {},\n ) => {\n await triggerRequest({\n messages: messagesRef.current.concat({\n ...message,\n id: message.id ?? generateId(),\n }),\n headers,\n body,\n });\n },\n [triggerRequest, generateId],\n );\n\n const reload = useCallback(\n async ({ headers, body }: ChatRequestOptions = {}) => {\n const messages = messagesRef.current;\n\n if (messages.length === 0) {\n return null;\n }\n\n // Remove last assistant message and retry last user message.\n const lastMessage = messages[messages.length - 1];\n return triggerRequest({\n messages:\n lastMessage.role === 'assistant' ? messages.slice(0, -1) : messages,\n headers,\n body,\n });\n },\n [triggerRequest],\n );\n\n const stop = useCallback(() => {\n if (abortControllerRef.current) {\n abortControllerRef.current.abort();\n abortControllerRef.current = null;\n }\n }, []);\n\n const experimental_resume = useCallback(async () => {\n const messages = messagesRef.current;\n\n triggerRequest({ messages }, 'resume');\n }, [triggerRequest]);\n\n const setMessages = useCallback(\n (\n messages:\n | UIMessage<MESSAGE_METADATA>[]\n | ((\n messages: UIMessage<MESSAGE_METADATA>[],\n ) => UIMessage<MESSAGE_METADATA>[]),\n ) => {\n if (typeof messages === 'function') {\n messages = messages(messagesRef.current);\n }\n\n mutate(messages, false);\n messagesRef.current = messages;\n },\n [mutate],\n );\n\n // Input state and handlers.\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n async (\n event?: { preventDefault?: () => void },\n options: ChatRequestOptions & {\n files?: FileList | FileUIPart[];\n } = {},\n metadata?: Object,\n ) => {\n event?.preventDefault?.();\n\n const fileParts = Array.isArray(options?.files)\n ? 
options.files\n : await convertFileListToFileUIParts(options?.files);\n\n if (!input && fileParts.length === 0) return;\n\n if (metadata) {\n extraMetadataRef.current = {\n ...extraMetadataRef.current,\n ...metadata,\n };\n }\n\n triggerRequest({\n messages: messagesRef.current.concat({\n id: generateId(),\n role: 'user',\n metadata: undefined,\n parts: [...fileParts, { type: 'text', text: input }],\n }),\n headers: options.headers,\n body: options.body,\n });\n\n setInput('');\n },\n [input, generateId, triggerRequest],\n );\n\n const handleInputChange = (e: any) => {\n setInput(e.target.value);\n };\n\n const addToolResult = useCallback(\n ({ toolCallId, result }: { toolCallId: string; result: unknown }) => {\n const currentMessages = messagesRef.current;\n\n updateToolCallResult({\n messages: currentMessages,\n toolCallId,\n toolResult: result,\n });\n\n // array mutation is required to trigger a re-render\n mutate(\n [\n ...currentMessages.slice(0, currentMessages.length - 1),\n {\n ...currentMessages[currentMessages.length - 1],\n // @ts-ignore\n // update the revisionId to trigger a re-render\n revisionId: generateId(),\n },\n ],\n false,\n );\n\n // when the request is ongoing, the auto-submit will be triggered after the request is finished\n if (status === 'submitted' || status === 'streaming') {\n return;\n }\n\n // auto-submit when all tool calls in the last assistant message have results:\n const lastMessage = currentMessages[currentMessages.length - 1];\n if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {\n triggerRequest({ messages: currentMessages });\n }\n },\n [mutate, status, triggerRequest, generateId],\n );\n\n return {\n messages: messages ?? [],\n id: chatId,\n setMessages,\n error,\n append,\n reload,\n stop,\n experimental_resume,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n status,\n addToolResult,\n };\n}\n","import throttleFunction from 'throttleit';\n\nexport function throttle<T extends (...args: any[]) => any>(\n fn: T,\n waitMs: number | undefined,\n): T {\n return waitMs != null ? 
throttleFunction(fn, waitMs) : fn;\n}\n","import { isDeepEqualData } from 'ai';\nimport { useEffect, useState } from 'react';\n\n/**\n * Returns a stable value that only updates the stored value (and triggers a re-render)\n * when the value's contents differ by deep-compare.\n */\nexport function useStableValue<T>(latestValue: T): T {\n const [value, setValue] = useState<T>(latestValue);\n\n useEffect(() => {\n if (!isDeepEqualData(latestValue, value)) {\n setValue(latestValue);\n }\n }, [latestValue, value]);\n\n return value;\n}\n","import {\n CompletionRequestOptions,\n UseCompletionOptions,\n callCompletionApi,\n} from 'ai';\nimport { useCallback, useEffect, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport { throttle } from './throttle';\n\nexport type { UseCompletionOptions };\n\nexport type UseCompletionHelpers = {\n /** The current completion result */\n completion: string;\n /**\n * Send a new prompt to the API endpoint and update the completion state.\n */\n complete: (\n prompt: string,\n options?: CompletionRequestOptions,\n ) => Promise<string | null | undefined>;\n /** The error object of the API request */\n error: undefined | Error;\n /**\n * Abort the current API request but keep the generated tokens.\n */\n stop: () => void;\n /**\n * Update the `completion` state locally.\n */\n setCompletion: (completion: string) => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /**\n * An input/textarea-ready onChange handler to control the value of the input\n * @example\n * ```jsx\n * <input onChange={handleInputChange} value={input} />\n * ```\n */\n handleInputChange: (\n event:\n | React.ChangeEvent<HTMLInputElement>\n | React.ChangeEvent<HTMLTextAreaElement>,\n ) => void;\n\n /**\n * Form submission handler to automatically reset input and append a user message\n * @example\n * ```jsx\n * <form onSubmit={handleSubmit}>\n * <input onChange={handleInputChange} value={input} />\n * </form>\n * ```\n */\n handleSubmit: (event?: { preventDefault?: () => void }) => void;\n\n /** Whether the API request is in progress */\n isLoading: boolean;\n};\n\nexport function useCompletion({\n api = '/api/completion',\n id,\n initialCompletion = '',\n initialInput = '',\n credentials,\n headers,\n body,\n streamProtocol = 'data',\n fetch,\n onFinish,\n onError,\n experimental_throttle: throttleWaitMs,\n}: UseCompletionOptions & {\n /**\n * Custom throttle wait in ms for the completion and data updates.\n * Default is undefined, which disables throttling.\n */\n experimental_throttle?: number;\n} = {}): UseCompletionHelpers {\n // Generate an unique id for the completion if not provided.\n const hookId = useId();\n const completionId = id || hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<string>([api, completionId], null, {\n fallbackData: initialCompletion,\n });\n\n const { data: isLoading = false, mutate: mutateLoading } = useSWR<boolean>(\n [completionId, 'loading'],\n null,\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const completion = data!;\n\n // Abort controller to cancel the current API call.\n const [abortController, setAbortController] =\n useState<AbortController | null>(null);\n\n const extraMetadataRef = useRef({\n credentials,\n headers,\n body,\n });\n\n useEffect(() => {\n extraMetadataRef.current = {\n 
credentials,\n headers,\n body,\n };\n }, [credentials, headers, body]);\n\n const triggerRequest = useCallback(\n async (prompt: string, options?: CompletionRequestOptions) =>\n callCompletionApi({\n api,\n prompt,\n credentials: extraMetadataRef.current.credentials,\n headers: { ...extraMetadataRef.current.headers, ...options?.headers },\n body: {\n ...extraMetadataRef.current.body,\n ...options?.body,\n },\n streamProtocol,\n fetch,\n // throttle streamed ui updates:\n setCompletion: throttle(\n (completion: string) => mutate(completion, false),\n throttleWaitMs,\n ),\n setLoading: mutateLoading,\n setError,\n setAbortController,\n onFinish,\n onError,\n }),\n [\n mutate,\n mutateLoading,\n api,\n extraMetadataRef,\n setAbortController,\n onFinish,\n onError,\n setError,\n streamProtocol,\n fetch,\n throttleWaitMs,\n ],\n );\n\n const stop = useCallback(() => {\n if (abortController) {\n abortController.abort();\n setAbortController(null);\n }\n }, [abortController]);\n\n const setCompletion = useCallback(\n (completion: string) => {\n mutate(completion, false);\n },\n [mutate],\n );\n\n const complete = useCallback<UseCompletionHelpers['complete']>(\n async (prompt, options) => {\n return triggerRequest(prompt, options);\n },\n [triggerRequest],\n );\n\n const [input, setInput] = useState(initialInput);\n\n const handleSubmit = useCallback(\n (event?: { preventDefault?: () => void }) => {\n event?.preventDefault?.();\n return input ? complete(input) : undefined;\n },\n [input, complete],\n );\n\n const handleInputChange = useCallback(\n (e: any) => {\n setInput(e.target.value);\n },\n [setInput],\n );\n\n return {\n completion,\n complete,\n error,\n setCompletion,\n stop,\n input,\n setInput,\n handleInputChange,\n handleSubmit,\n isLoading,\n };\n}\n","import {\n FetchFunction,\n isAbortError,\n safeValidateTypes,\n} from '@ai-sdk/provider-utils';\nimport {\n asSchema,\n DeepPartial,\n isDeepEqualData,\n parsePartialJson,\n Schema,\n} from 'ai';\nimport { useCallback, useId, useRef, useState } from 'react';\nimport useSWR from 'swr';\nimport z from 'zod';\n\n// use function to allow for mocking in tests:\nconst getOriginalFetch = () => fetch;\n\nexport type Experimental_UseObjectOptions<RESULT> = {\n /**\n * The API endpoint. It should stream JSON that matches the schema as chunked text.\n */\n api: string;\n\n /**\n * A Zod schema that defines the shape of the complete object.\n */\n schema: z.Schema<RESULT, z.ZodTypeDef, any> | Schema<RESULT>;\n\n /**\n * An unique identifier. If not provided, a random one will be\n * generated. When provided, the `useObject` hook with the same `id` will\n * have shared states across components.\n */\n id?: string;\n\n /**\n * An optional value for the initial object.\n */\n initialValue?: DeepPartial<RESULT>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCallback that is called when the stream has finished.\n */\n onFinish?: (event: {\n /**\nThe generated object (typed according to the schema).\nCan be undefined if the final object does not match the schema.\n */\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. 
a TypeValidationError when the final object does not match the schema.\n */\n error: Error | undefined;\n }) => Promise<void> | void;\n\n /**\n * Callback function to be called when an error is encountered.\n */\n onError?: (error: Error) => void;\n\n /**\n * Additional HTTP headers to be included in the request.\n */\n headers?: Record<string, string> | Headers;\n\n /**\n * The credentials mode to be used for the fetch request.\n * Possible values are: 'omit', 'same-origin', 'include'.\n * Defaults to 'same-origin'.\n */\n credentials?: RequestCredentials;\n};\n\nexport type Experimental_UseObjectHelpers<RESULT, INPUT> = {\n /**\n * Calls the API with the provided input as JSON body.\n */\n submit: (input: INPUT) => void;\n\n /**\n * The current value for the generated object. Updated as the API streams JSON chunks.\n */\n object: DeepPartial<RESULT> | undefined;\n\n /**\n * The error object of the API request if any.\n */\n error: Error | undefined;\n\n /**\n * Flag that indicates whether an API request is in progress.\n */\n isLoading: boolean;\n\n /**\n * Abort the current request immediately, keep the current partial object if any.\n */\n stop: () => void;\n};\n\nfunction useObject<RESULT, INPUT = any>({\n api,\n id,\n schema, // required, in the future we will use it for validation\n initialValue,\n fetch,\n onError,\n onFinish,\n headers,\n credentials,\n}: Experimental_UseObjectOptions<RESULT>): Experimental_UseObjectHelpers<\n RESULT,\n INPUT\n> {\n // Generate an unique id if not provided.\n const hookId = useId();\n const completionId = id ?? hookId;\n\n // Store the completion state in SWR, using the completionId as the key to share states.\n const { data, mutate } = useSWR<DeepPartial<RESULT>>(\n [api, completionId],\n null,\n { fallbackData: initialValue },\n );\n\n const [error, setError] = useState<undefined | Error>(undefined);\n const [isLoading, setIsLoading] = useState(false);\n\n // Abort controller to cancel the current API call.\n const abortControllerRef = useRef<AbortController | null>(null);\n\n const stop = useCallback(() => {\n try {\n abortControllerRef.current?.abort();\n } catch (ignored) {\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n }, []);\n\n const submit = async (input: INPUT) => {\n try {\n mutate(undefined); // reset the data\n setIsLoading(true);\n setError(undefined);\n\n const abortController = new AbortController();\n abortControllerRef.current = abortController;\n\n const actualFetch = fetch ?? getOriginalFetch();\n const response = await actualFetch(api, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n credentials,\n signal: abortController.signal,\n body: JSON.stringify(input),\n });\n\n if (!response.ok) {\n throw new Error(\n (await response.text()) ?? 
'Failed to fetch the response.',\n );\n }\n\n if (response.body == null) {\n throw new Error('The response body is empty.');\n }\n\n let accumulatedText = '';\n let latestObject: DeepPartial<RESULT> | undefined = undefined;\n\n await response.body.pipeThrough(new TextDecoderStream()).pipeTo(\n new WritableStream<string>({\n async write(chunk) {\n accumulatedText += chunk;\n\n const { value } = await parsePartialJson(accumulatedText);\n const currentObject = value as DeepPartial<RESULT>;\n\n if (!isDeepEqualData(latestObject, currentObject)) {\n latestObject = currentObject;\n\n mutate(currentObject);\n }\n },\n\n async close() {\n setIsLoading(false);\n abortControllerRef.current = null;\n\n if (onFinish != null) {\n const validationResult = await safeValidateTypes({\n value: latestObject,\n schema: asSchema(schema),\n });\n\n onFinish(\n validationResult.success\n ? { object: validationResult.value, error: undefined }\n : { object: undefined, error: validationResult.error },\n );\n }\n },\n }),\n );\n } catch (error) {\n if (isAbortError(error)) {\n return;\n }\n\n if (onError && error instanceof Error) {\n onError(error);\n }\n\n setIsLoading(false);\n setError(error instanceof Error ? error : new Error(String(error)));\n }\n };\n\n return {\n submit,\n object: data,\n error,\n isLoading,\n stop,\n };\n}\n\nexport const experimental_useObject = useObject;\n"],"mappings":";AAOA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,aAAa,aAAAA,YAAW,SAAS,QAAQ,YAAAC,iBAAgB;AAClE,OAAO,YAAY;;;AClBnB,OAAO,sBAAsB;AAEtB,SAAS,SACd,IACA,QACG;AACH,SAAO,UAAU,OAAO,iBAAiB,IAAI,MAAM,IAAI;AACzD;;;ACPA,SAAS,uBAAuB;AAChC,SAAS,WAAW,gBAAgB;AAM7B,SAAS,eAAkB,aAAmB;AACnD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAY,WAAW;AAEjD,YAAU,MAAM;AACd,QAAI,CAAC,gBAAgB,aAAa,KAAK,GAAG;AACxC,eAAS,WAAW;AAAA,IACtB;AAAA,EACF,GAAG,CAAC,aAAa,KAAK,CAAC;AAEvB,SAAO;AACT;;;AFuGO,SAAS,QAA0B;AAAA,EACxC,MAAM;AAAA,EACN;AAAA,EACA;AAAA,EACA,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EACb,OAAAC;AAAA,EACA,uBAAuB;AAAA,EACvB;AACF,IAqBI,CAAC,GAAqC;AAExC,QAAM,CAAC,MAAM,IAAIC,UAAS,UAAU;AAGpC,QAAM,SAAS,kBAAM;AACrB,QAAM,UAAU,OAAO,QAAQ,WAAW,CAAC,KAAK,MAAM,IAAI;AAG1D,QAAM,wBAAwB,eAAe,4CAAmB,CAAC,CAAC;AAClE,QAAM,2BAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,CAAC,qBAAqB;AAAA,EACxB;AAGA,QAAM,EAAE,MAAM,UAAU,OAAO,IAAI;AAAA,IACjC,CAAC,SAAS,UAAU;AAAA,IACpB;AAAA,IACA,EAAE,cAAc,yBAAyB;AAAA,EAC3C;AAGA,QAAM,cAAc,OAAsC,YAAY,CAAC,CAAC;AACxE,EAAAC,WAAU,MAAM;AACd,gBAAY,UAAU,YAAY,CAAC;AAAA,EACrC,GAAG,CAAC,QAAQ,CAAC;AAEb,QAAM,EAAE,MAAM,SAAS,SAAS,QAAQ,aAAa,IAAI,OAEvD,CAAC,SAAS,QAAQ,GAAG,IAAI;AAE3B,QAAM,EAAE,MAAM,QAAQ,QAAW,QAAQ,SAAS,IAAI,OAEpD,CAAC,SAAS,OAAO,GAAG,IAAI;AAG1B,QAAM,qBAAqB,OAA+B,IAAI;AAE9D,QAAM,mBAAmB,OAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,EAAAA,WAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,iBAAiB;AAAA,IACrB,OACE,aAGA,cAAqC,eAClC;AA1NT;AA2NM,mBAAa,WAAW;AACxB,eAAS,MAAS;AAElB,YAAM,eAAe,YAAY;AAEjC,YAAM,eAAe,aAAa;AAClC,YAAM,UAAU;AAAA,QACd,mBAAmB,aAAa,aAAa,SAAS,CAAC,CAAC;AAAA,MAC1D;AAEA,UAAI;AACF,cAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BAAmB,UAAU;AAE7B,cAAM,kBAAkB,SAAS,QAAQ,cAAc;AAGvD,wBAAgB,cAAc,KAAK;AAEnC,cAAM,YAAY;AAAA,UAChB;AAAA,UACA,OAAM,wFAAkC;AAAA,YACtC,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,aAAa,YAAY;AAAA,UAC3B,OAJM,YAIA;AAAA,YACJ,IAAI;AAAA,YACJ,UAAU;AAAA,YACV,GAAG,iBAAiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA;AAAA,UACA,aAAa,iBAAiB,QAAQ;AAAA,UACtC,SAAS;AAAA,YACP,GAAG,iBA
AiB,QAAQ;AAAA,YAC5B,GAAG,YAAY;AAAA,UACjB;AAAA,UACA,iBAAiB,MAAM,mBAAmB;AAAA,UAC1C,SAAS,EAAE,QAAQ,GAAG;AACpB,yBAAa,WAAW;AAExB,kBAAM,qBACJ,QAAQ,OAAO,aAAa,aAAa,SAAS,CAAC,EAAE;AAEvD;AAAA,cACE;AAAA,gBACE,GAAI,qBACA,aAAa,MAAM,GAAG,aAAa,SAAS,CAAC,IAC7C;AAAA,gBACJ;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,OAAAF;AAAA,UACA,aAAa,aAAa,aAAa,SAAS,CAAC;AAAA,UACjD;AAAA,UACA;AAAA,QACF,CAAC;AAED,2BAAmB,UAAU;AAE7B,qBAAa,OAAO;AAAA,MACtB,SAAS,KAAK;AAEZ,YAAK,IAAY,SAAS,cAAc;AACtC,6BAAmB,UAAU;AAC7B,uBAAa,OAAO;AACpB,iBAAO;AAAA,QACT;AAEA,YAAI,WAAW,eAAe,OAAO;AACnC,kBAAQ,GAAG;AAAA,QACb;AAEA,iBAAS,GAAY;AACrB,qBAAa,OAAO;AAAA,MACtB;AAIA,YAAMG,YAAW,YAAY;AAC7B,UACE,uBAAuB;AAAA,QACrB,+BAA+B;AAAA,QAC/B,sBAAsB;AAAA,QACtB;AAAA,QACA,UAAAA;AAAA,MACF,CAAC,GACD;AACA,cAAM,eAAe,EAAE,UAAAA,UAAS,CAAC;AAAA,MACnC;AAAA,IACF;AAAA,IACA;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAH;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,OACE,SACA,EAAE,SAAAI,UAAS,MAAAC,MAAK,IAAwB,CAAC,MACtC;AArVT;AAsVM,YAAM,eAAe;AAAA,QACnB,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,GAAG;AAAA,UACH,KAAI,aAAQ,OAAR,YAAc,WAAW;AAAA,QAC/B,CAAC;AAAA,QACD,SAAAD;AAAA,QACA,MAAAC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,gBAAgB,UAAU;AAAA,EAC7B;AAEA,QAAM,SAAS;AAAA,IACb,OAAO,EAAE,SAAAD,UAAS,MAAAC,MAAK,IAAwB,CAAC,MAAM;AACpD,YAAMF,YAAW,YAAY;AAE7B,UAAIA,UAAS,WAAW,GAAG;AACzB,eAAO;AAAA,MACT;AAGA,YAAM,cAAcA,UAASA,UAAS,SAAS,CAAC;AAChD,aAAO,eAAe;AAAA,QACpB,UACE,YAAY,SAAS,cAAcA,UAAS,MAAM,GAAG,EAAE,IAAIA;AAAA,QAC7D,SAAAC;AAAA,QACA,MAAAC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,OAAO,YAAY,MAAM;AAC7B,QAAI,mBAAmB,SAAS;AAC9B,yBAAmB,QAAQ,MAAM;AACjC,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,sBAAsB,YAAY,YAAY;AAClD,UAAMF,YAAW,YAAY;AAE7B,mBAAe,EAAE,UAAAA,UAAS,GAAG,QAAQ;AAAA,EACvC,GAAG,CAAC,cAAc,CAAC;AAEnB,QAAM,cAAc;AAAA,IAClB,CACEA,cAKG;AACH,UAAI,OAAOA,cAAa,YAAY;AAClC,QAAAA,YAAWA,UAAS,YAAY,OAAO;AAAA,MACzC;AAEA,aAAOA,WAAU,KAAK;AACtB,kBAAY,UAAUA;AAAA,IACxB;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAGA,QAAM,CAAC,OAAO,QAAQ,IAAIF,UAAS,YAAY;AAE/C,QAAM,eAAe;AAAA,IACnB,OACE,OACA,UAEI,CAAC,GACL,aACG;AA/ZT;AAgaM,2CAAO,mBAAP;AAEA,YAAM,YAAY,MAAM,QAAQ,mCAAS,KAAK,IAC1C,QAAQ,QACR,MAAM,6BAA6B,mCAAS,KAAK;AAErD,UAAI,CAAC,SAAS,UAAU,WAAW;AAAG;AAEtC,UAAI,UAAU;AACZ,yBAAiB,UAAU;AAAA,UACzB,GAAG,iBAAiB;AAAA,UACpB,GAAG;AAAA,QACL;AAAA,MACF;AAEA,qBAAe;AAAA,QACb,UAAU,YAAY,QAAQ,OAAO;AAAA,UACnC,IAAI,WAAW;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,UACV,OAAO,CAAC,GAAG,WAAW,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,QACrD,CAAC;AAAA,QACD,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,MAChB,CAAC;AAED,eAAS,EAAE;AAAA,IACb;AAAA,IACA,CAAC,OAAO,YAAY,cAAc;AAAA,EACpC;AAEA,QAAM,oBAAoB,CAAC,MAAW;AACpC,aAAS,EAAE,OAAO,KAAK;AAAA,EACzB;AAEA,QAAM,gBAAgB;AAAA,IACpB,CAAC,EAAE,YAAY,OAAO,MAA+C;AACnE,YAAM,kBAAkB,YAAY;AAEpC,2BAAqB;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAGD;AAAA,QACE;AAAA,UACE,GAAG,gBAAgB,MAAM,GAAG,gBAAgB,SAAS,CAAC;AAAA,UACtD;AAAA,YACE,GAAG,gBAAgB,gBAAgB,SAAS,CAAC;AAAA;AAAA;AAAA,YAG7C,YAAY,WAAW;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAGA,UAAI,WAAW,eAAe,WAAW,aAAa;AACpD;AAAA,MACF;AAGA,YAAM,cAAc,gBAAgB,gBAAgB,SAAS,CAAC;AAC9D,UAAI,yCAAyC,WAAW,GAAG;AACzD,uBAAe,EAAE,UAAU,gBAAgB,CAAC;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,CAAC,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EAC7C;AAEA,SAAO;AAAA,IACL,UAAU,8BAAY,CAAC;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AGzfA;AAAA,EAGE;AAAA,OACK;AACP,SAA
S,eAAAK,cAAa,aAAAC,YAAW,OAAO,UAAAC,SAAQ,YAAAC,iBAAgB;AAChE,OAAOC,aAAY;AAyDZ,SAAS,cAAc;AAAA,EAC5B,MAAM;AAAA,EACN;AAAA,EACA,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EACA;AAAA,EACA,iBAAiB;AAAA,EACjB,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA,uBAAuB;AACzB,IAMI,CAAC,GAAyB;AAE5B,QAAM,SAAS,MAAM;AACrB,QAAM,eAAe,MAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,IAAIC,QAAe,CAAC,KAAK,YAAY,GAAG,MAAM;AAAA,IACjE,cAAc;AAAA,EAChB,CAAC;AAED,QAAM,EAAE,MAAM,YAAY,OAAO,QAAQ,cAAc,IAAIA;AAAA,IACzD,CAAC,cAAc,SAAS;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAIC,UAA4B,MAAS;AAC/D,QAAM,aAAa;AAGnB,QAAM,CAAC,iBAAiB,kBAAkB,IACxCA,UAAiC,IAAI;AAEvC,QAAM,mBAAmBC,QAAO;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,EAAAC,WAAU,MAAM;AACd,qBAAiB,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG,CAAC,aAAa,SAAS,IAAI,CAAC;AAE/B,QAAM,iBAAiBC;AAAA,IACrB,OAAO,QAAgB,YACrB,kBAAkB;AAAA,MAChB;AAAA,MACA;AAAA,MACA,aAAa,iBAAiB,QAAQ;AAAA,MACtC,SAAS,EAAE,GAAG,iBAAiB,QAAQ,SAAS,GAAG,mCAAS,QAAQ;AAAA,MACpE,MAAM;AAAA,QACJ,GAAG,iBAAiB,QAAQ;AAAA,QAC5B,GAAG,mCAAS;AAAA,MACd;AAAA,MACA;AAAA,MACA,OAAAL;AAAA;AAAA,MAEA,eAAe;AAAA,QACb,CAACM,gBAAuB,OAAOA,aAAY,KAAK;AAAA,QAChD;AAAA,MACF;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,IACH;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACAN;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAOK,aAAY,MAAM;AAC7B,QAAI,iBAAiB;AACnB,sBAAgB,MAAM;AACtB,yBAAmB,IAAI;AAAA,IACzB;AAAA,EACF,GAAG,CAAC,eAAe,CAAC;AAEpB,QAAM,gBAAgBA;AAAA,IACpB,CAACC,gBAAuB;AACtB,aAAOA,aAAY,KAAK;AAAA,IAC1B;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AAEA,QAAM,WAAWD;AAAA,IACf,OAAO,QAAQ,YAAY;AACzB,aAAO,eAAe,QAAQ,OAAO;AAAA,IACvC;AAAA,IACA,CAAC,cAAc;AAAA,EACjB;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAIH,UAAS,YAAY;AAE/C,QAAM,eAAeG;AAAA,IACnB,CAAC,UAA4C;AArLjD;AAsLM,2CAAO,mBAAP;AACA,aAAO,QAAQ,SAAS,KAAK,IAAI;AAAA,IACnC;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,oBAAoBA;AAAA,IACxB,CAAC,MAAW;AACV,eAAS,EAAE,OAAO,KAAK;AAAA,IACzB;AAAA,IACA,CAAC,QAAQ;AAAA,EACX;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC/MA;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EAEA,mBAAAE;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAAC,cAAa,SAAAC,QAAO,UAAAC,SAAQ,YAAAC,iBAAgB;AACrD,OAAOC,aAAY;AAInB,IAAM,mBAAmB,MAAM;AA4F/B,SAAS,UAA+B;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AAEA,QAAM,SAASJ,OAAM;AACrB,QAAM,eAAe,kBAAM;AAG3B,QAAM,EAAE,MAAM,OAAO,IAAIG;AAAA,IACvB,CAAC,KAAK,YAAY;AAAA,IAClB;AAAA,IACA,EAAE,cAAc,aAAa;AAAA,EAC/B;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAID,UAA4B,MAAS;AAC/D,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAS,KAAK;AAGhD,QAAM,qBAAqBD,QAA+B,IAAI;AAE9D,QAAM,OAAOF,aAAY,MAAM;AA5IjC;AA6II,QAAI;AACF,+BAAmB,YAAnB,mBAA4B;AAAA,IAC9B,SAAS,SAAS;AAAA,IAClB,UAAE;AACA,mBAAa,KAAK;AAClB,yBAAmB,UAAU;AAAA,IAC/B;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,QAAM,SAAS,OAAO,UAAiB;AAtJzC;AAuJI,QAAI;AACF,aAAO,MAAS;AAChB,mBAAa,IAAI;AACjB,eAAS,MAAS;AAElB,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAmB,UAAU;AAE7B,YAAM,cAAcK,UAAA,OAAAA,SAAS,iBAAiB;AAC9C,YAAM,WAAW,MAAM,YAAY,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ,gBAAgB;AAAA,QACxB,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI;AAAA,WACP,WAAM,SAAS,KAAK,MAApB,YAA0B;AAAA,QAC7B;AAAA,MACF;AAEA,UAAI,SAAS,QAAQ,MAAM;AACzB,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAEA,UAAI,kBAAkB;AACtB,UAAI,eAAgD;AAEpD,YAAM,SAAS,KAAK,YAAY,IAAI,kBAAkB,CAAC,EAAE;AAAA,QACvD,IAAI,eAAuB;AAAA,UACzB,MAAM,MAAM,OAAO;AACjB,+BAAmB;AAEnB,kBAAM,EAAE,MAAM,IAAI,MAAM,iBAAi
B,eAAe;AACxD,kBAAM,gBAAgB;AAEtB,gBAAI,CAACN,iBAAgB,cAAc,aAAa,GAAG;AACjD,6BAAe;AAEf,qBAAO,aAAa;AAAA,YACtB;AAAA,UACF;AAAA,UAEA,MAAM,QAAQ;AACZ,yBAAa,KAAK;AAClB,+BAAmB,UAAU;AAE7B,gBAAI,YAAY,MAAM;AACpB,oBAAM,mBAAmB,MAAM,kBAAkB;AAAA,gBAC/C,OAAO;AAAA,gBACP,QAAQ,SAAS,MAAM;AAAA,cACzB,CAAC;AAED;AAAA,gBACE,iBAAiB,UACb,EAAE,QAAQ,iBAAiB,OAAO,OAAO,OAAU,IACnD,EAAE,QAAQ,QAAW,OAAO,iBAAiB,MAAM;AAAA,cACzD;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAASO,QAAO;AACd,UAAI,aAAaA,MAAK,GAAG;AACvB;AAAA,MACF;AAEA,UAAI,WAAWA,kBAAiB,OAAO;AACrC,gBAAQA,MAAK;AAAA,MACf;AAEA,mBAAa,KAAK;AAClB,eAASA,kBAAiB,QAAQA,SAAQ,IAAI,MAAM,OAAOA,MAAK,CAAC,CAAC;AAAA,IACpE;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAM,yBAAyB;","names":["useEffect","useState","fetch","useState","useEffect","messages","headers","body","useCallback","useEffect","useRef","useState","useSWR","fetch","useSWR","useState","useRef","useEffect","useCallback","completion","isDeepEqualData","useCallback","useId","useRef","useState","useSWR","fetch","error"]}
|
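Aside: the bundled sources embedded in the sourcemap above include the `useChat` hook and its helpers (`messages`, `input`, `handleInputChange`, `handleSubmit`, `status`, `error`, `stop`). A minimal, hypothetical client component wiring those helpers together might look like the sketch below; the `Chat` component name, the Next.js-style `'use client'` directive, and the backing route at the hook's default `/api/chat` endpoint are assumptions for illustration and are not part of this diff.

```tsx
// Hypothetical usage sketch -- not part of this diff. Assumes a Next.js App
// Router setup and a route handler at the hook's default `/api/chat` endpoint
// that streams UI messages.
'use client';

import { useChat } from '@ai-sdk/react';

export default function Chat() {
  const {
    messages,
    input,
    handleInputChange,
    handleSubmit,
    status,
    error,
    stop,
  } = useChat();

  return (
    <div>
      {messages.map(message => (
        <div key={message.id}>
          {message.role}:{' '}
          {message.parts.map((part, index) =>
            // only text parts are rendered in this sketch
            part.type === 'text' ? <span key={index}>{part.text}</span> : null,
          )}
        </div>
      ))}

      {error != null && <div>Error: {error.message}</div>}

      <form onSubmit={handleSubmit}>
        <input
          value={input}
          onChange={handleInputChange}
          placeholder="Say something..."
        />
        <button type="submit" disabled={status !== 'ready'}>
          Send
        </button>
        {(status === 'submitted' || status === 'streaming') && (
          <button type="button" onClick={stop}>
            Stop
          </button>
        )}
      </form>
    </div>
  );
}
```

As the embedded `use-chat.ts` source shows, in this canary the input state helpers (`input`, `setInput`, `handleInputChange`, `handleSubmit`) are still returned by the hook itself.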
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ai-sdk/react",
|
|
3
|
-
"version": "2.0.0-canary.
|
|
3
|
+
"version": "2.0.0-canary.21",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"sideEffects": false,
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -21,7 +21,7 @@
|
|
|
21
21
|
"dependencies": {
|
|
22
22
|
"swr": "^2.2.5",
|
|
23
23
|
"throttleit": "2.1.0",
|
|
24
|
-
"ai": "5.0.0-canary.
|
|
24
|
+
"ai": "5.0.0-canary.22",
|
|
25
25
|
"@ai-sdk/provider-utils": "3.0.0-canary.17"
|
|
26
26
|
},
|
|
27
27
|
"devDependencies": {
|