@ai-sdk/react 2.0.0-canary.20 → 2.0.0-canary.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.d.mts +20 -39
- package/dist/index.d.ts +20 -39
- package/dist/index.js +147 -290
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +132 -280
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
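
The substantive change in dist/index.js (below) is that useChat no longer drives requests itself: the per-hook options api, initialMessages, headers, body, fetch, streamProtocol, onResponse, maxSteps, and experimental_prepareRequestBody are gone, a chatStore option is accepted instead (falling back to defaultChatStore({ api: "/api/chat", generateId })), hook state is read from the store via useSyncExternalStore, and isLoading is dropped from the return value in favor of status. A minimal usage sketch inferred from this bundled output — canary API, so treat the option and return shapes as assumptions rather than documented behavior:

// Sketch only: `defaultChatStore` and the `chatStore` option are taken from the
// bundled fallback in this diff; other details may differ in the canary docs.
import { defaultChatStore } from 'ai';
import { useChat } from '@ai-sdk/react';

// Request configuration now lives on the store instead of on the hook.
const chatStore = defaultChatStore({ api: '/api/chat' });

export function ChatPanel() {
  const { messages, status, input, handleInputChange, handleSubmit } = useChat({
    chatStore, // omit it and the hook creates its own store targeting /api/chat
  });
  // `isLoading` is no longer returned; derive it from `status`.
  const isLoading = status === 'submitted' || status === 'streaming';
  return (
    <form onSubmit={handleSubmit}>
      {messages.map((message) => (
        <div key={message.id}>
          {message.role}:{' '}
          {message.parts.map((part) => (part.type === 'text' ? part.text : '')).join('')}
        </div>
      ))}
      <input value={input} onChange={handleInputChange} disabled={isLoading} />
    </form>
  );
}
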
package/dist/index.js
CHANGED
@@ -37,299 +37,152 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);
 
 // src/use-chat.ts
-var import_ai2 = require("ai");
-var import_react2 = require("react");
-var import_swr = __toESM(require("swr"));
-
-// src/throttle.ts
-var import_throttleit = __toESM(require("throttleit"));
-function throttle(fn, waitMs) {
-return waitMs != null ? (0, import_throttleit.default)(fn, waitMs) : fn;
-}
-
-// src/util/use-stable-value.ts
 var import_ai = require("ai");
 var import_react = require("react");
-function useStableValue(latestValue) {
-const [value, setValue] = (0, import_react.useState)(latestValue);
-(0, import_react.useEffect)(() => {
-if (!(0, import_ai.isDeepEqualData)(latestValue, value)) {
-setValue(latestValue);
-}
-}, [latestValue, value]);
-return value;
-}
-
-// src/use-chat.ts
 function useChat({
-api = "/api/chat",
 id,
-initialMessages,
 initialInput = "",
 onToolCall,
-experimental_prepareRequestBody,
-maxSteps = 1,
-streamProtocol = "data",
-onResponse,
 onFinish,
 onError,
-
-headers,
-body,
-generateId = import_ai2.generateId,
-fetch: fetch2,
+generateId = import_ai.generateId,
 experimental_throttle: throttleWaitMs,
-
+chatStore: chatStoreArg
 } = {}) {
-const [hookId] = (0,
+const [hookId] = (0, import_react.useState)(generateId);
 const chatId = id != null ? id : hookId;
-const
-
-
-
-
+const chatStore = (0, import_react.useRef)(
+chatStoreArg != null ? chatStoreArg : (0, import_ai.defaultChatStore)({
+api: "/api/chat",
+generateId
+})
 );
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-headers,
-body
-});
-(0, import_react2.useEffect)(() => {
-extraMetadataRef.current = {
-credentials,
-headers,
-body
-};
-}, [credentials, headers, body]);
-const triggerRequest = (0, import_react2.useCallback)(
-async (chatRequest, requestType = "generate") => {
-var _a;
-mutateStatus("submitted");
-setError(void 0);
-const chatMessages = chatRequest.messages;
-const messageCount = chatMessages.length;
-const maxStep = (0, import_ai2.extractMaxToolInvocationStep)(
-(0, import_ai2.getToolInvocations)(chatMessages[chatMessages.length - 1])
-);
-try {
-const abortController = new AbortController();
-abortControllerRef.current = abortController;
-const throttledMutate = throttle(mutate, throttleWaitMs);
-throttledMutate(chatMessages, false);
-await (0, import_ai2.callChatApi)({
-api,
-body: (_a = experimental_prepareRequestBody == null ? void 0 : experimental_prepareRequestBody({
-id: chatId,
-messages: chatMessages,
-requestData: chatRequest.data,
-requestBody: chatRequest.body
-})) != null ? _a : {
-id: chatId,
-messages: chatMessages,
-data: chatRequest.data,
-...extraMetadataRef.current.body,
-...chatRequest.body
-},
-streamProtocol,
-credentials: extraMetadataRef.current.credentials,
-headers: {
-...extraMetadataRef.current.headers,
-...chatRequest.headers
-},
-abortController: () => abortControllerRef.current,
-onResponse,
-onUpdate({ message }) {
-mutateStatus("streaming");
-const replaceLastMessage = message.id === chatMessages[chatMessages.length - 1].id;
-throttledMutate(
-[
-...replaceLastMessage ? chatMessages.slice(0, chatMessages.length - 1) : chatMessages,
-message
-],
-false
-);
-},
-onToolCall,
-onFinish,
-generateId,
-fetch: fetch2,
-lastMessage: chatMessages[chatMessages.length - 1],
-requestType,
-messageMetadataSchema
-});
-abortControllerRef.current = null;
-mutateStatus("ready");
-} catch (err) {
-if (err.name === "AbortError") {
-abortControllerRef.current = null;
-mutateStatus("ready");
-return null;
-}
-if (onError && err instanceof Error) {
-onError(err);
+if (!chatStore.current.hasChat(chatId)) {
+chatStore.current.addChat(chatId, []);
+}
+const subscribe = (0, import_react.useCallback)(
+({
+onStoreChange,
+eventType
+}) => {
+return chatStore.current.subscribe({
+onChatChanged: (event) => {
+if (event.chatId !== chatId || event.type !== eventType) {
+return;
+}
+onStoreChange();
 }
-
-mutateStatus("error");
-}
-const messages2 = messagesRef.current;
-if ((0, import_ai2.shouldResubmitMessages)({
-originalMaxToolInvocationStep: maxStep,
-originalMessageCount: messageCount,
-maxSteps,
-messages: messages2
-})) {
-await triggerRequest({ messages: messages2 });
-}
+});
 },
-[
-
-
-
-
-
-
+[chatStore, chatId]
+);
+const addToolResult = (0, import_react.useCallback)(
+(options) => chatStore.current.addToolResult({ chatId, ...options }),
+[chatStore, chatId]
+);
+const stopStream = (0, import_react.useCallback)(() => {
+chatStore.current.stopStream({ chatId });
+}, [chatStore, chatId]);
+const error = (0, import_react.useSyncExternalStore)(
+(callback) => subscribe({
+onStoreChange: callback,
+eventType: "chat-status-changed"
+}),
+() => chatStore.current.getError(chatId),
+() => chatStore.current.getError(chatId)
+);
+const status = (0, import_react.useSyncExternalStore)(
+(callback) => subscribe({
+onStoreChange: callback,
+eventType: "chat-status-changed"
+}),
+() => chatStore.current.getStatus(chatId),
+() => chatStore.current.getStatus(chatId)
+);
+const messages = (0, import_react.useSyncExternalStore)(
+(callback) => {
+return subscribe({
+onStoreChange: callback,
+eventType: "chat-messages-changed"
+});
+},
+() => chatStore.current.getMessages(chatId),
+() => chatStore.current.getMessages(chatId)
+);
+const append = (0, import_react.useCallback)(
+(message, { headers, body } = {}) => chatStore.current.submitMessage({
+chatId,
+message,
+headers,
+body,
 onError,
-setError,
-streamProtocol,
-experimental_prepareRequestBody,
 onToolCall,
-
-
-
-
-
-
+onFinish
+}),
+[chatStore, chatId, onError, onToolCall, onFinish]
+);
+const reload = (0, import_react.useCallback)(
+async ({ headers, body } = {}) => chatStore.current.resubmitLastUserMessage({
 chatId,
-
-
+headers,
+body,
+onError,
+onToolCall,
+onFinish
+}),
+[chatStore, chatId, onError, onToolCall, onFinish]
 );
-const
-
-
-
-
-
-
-
-
-body: body2,
-data
-});
-},
-[triggerRequest, generateId]
+const stop = (0, import_react.useCallback)(() => stopStream(), [stopStream]);
+const experimental_resume = (0, import_react.useCallback)(
+async () => chatStore.current.resumeStream({
+chatId,
+onError,
+onToolCall,
+onFinish
+}),
+[chatStore, chatId, onError, onToolCall, onFinish]
 );
-const
-
-
-
-return null;
+const setMessages = (0, import_react.useCallback)(
+(messagesParam) => {
+if (typeof messagesParam === "function") {
+messagesParam = messagesParam(messages);
 }
-
-
-messages:
-headers: headers2,
-body: body2,
-data
+chatStore.current.setMessages({
+id: chatId,
+messages: messagesParam
 });
 },
-[
+[chatId, messages]
 );
-const
-
-
-abortControllerRef.current = null;
-}
-}, []);
-const experimental_resume = (0, import_react2.useCallback)(async () => {
-const messages2 = messagesRef.current;
-triggerRequest({ messages: messages2 }, "resume");
-}, [triggerRequest]);
-const setMessages = (0, import_react2.useCallback)(
-(messages2) => {
-if (typeof messages2 === "function") {
-messages2 = messages2(messagesRef.current);
-}
-mutate(messages2, false);
-messagesRef.current = messages2;
-},
-[mutate]
-);
-const [input, setInput] = (0, import_react2.useState)(initialInput);
-const handleSubmit = (0, import_react2.useCallback)(
-async (event, options = {}, metadata) => {
+const [input, setInput] = (0, import_react.useState)(initialInput);
+const handleSubmit = (0, import_react.useCallback)(
+async (event, options = {}) => {
 var _a;
 (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
-
+const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await (0, import_ai.convertFileListToFileUIParts)(options == null ? void 0 : options.files);
+if (!input && fileParts.length === 0)
 return;
-
-
-...extraMetadataRef.current,
-...metadata
-};
-}
-const fileParts = Array.isArray(options == null ? void 0 : options.files) ? options.files : await (0, import_ai2.convertFileListToFileUIParts)(options == null ? void 0 : options.files);
-triggerRequest({
-messages: messagesRef.current.concat({
+append(
+{
 id: generateId(),
 role: "user",
 metadata: void 0,
 parts: [...fileParts, { type: "text", text: input }]
-}
-
-
-
-
+},
+{
+headers: options.headers,
+body: options.body
+}
+);
 setInput("");
 },
-[input, generateId,
+[input, generateId, append, messages]
 );
 const handleInputChange = (e) => {
 setInput(e.target.value);
 };
-const addToolResult = (0, import_react2.useCallback)(
-({ toolCallId, result }) => {
-const currentMessages = messagesRef.current;
-(0, import_ai2.updateToolCallResult)({
-messages: currentMessages,
-toolCallId,
-toolResult: result
-});
-mutate(
-[
-...currentMessages.slice(0, currentMessages.length - 1),
-{
-...currentMessages[currentMessages.length - 1],
-// @ts-ignore
-// update the revisionId to trigger a re-render
-revisionId: generateId()
-}
-],
-false
-);
-if (status === "submitted" || status === "streaming") {
-return;
-}
-const lastMessage = currentMessages[currentMessages.length - 1];
-if ((0, import_ai2.isAssistantMessageWithCompletedToolCalls)(lastMessage)) {
-triggerRequest({ messages: currentMessages });
-}
-},
-[mutate, status, triggerRequest, generateId]
-);
 return {
-messages
+messages,
 id: chatId,
 setMessages,
 error,
@@ -341,16 +194,23 @@ function useChat({
 setInput,
 handleInputChange,
 handleSubmit,
-isLoading: status === "submitted" || status === "streaming",
 status,
 addToolResult
 };
 }
 
 // src/use-completion.ts
-var
-var
-var
+var import_ai2 = require("ai");
+var import_react2 = require("react");
+var import_swr = __toESM(require("swr"));
+
+// src/throttle.ts
+var import_throttleit = __toESM(require("throttleit"));
+function throttle(fn, waitMs) {
+return waitMs != null ? (0, import_throttleit.default)(fn, waitMs) : fn;
+}
+
+// src/use-completion.ts
 function useCompletion({
 api = "/api/completion",
 id,
@@ -361,37 +221,36 @@ function useCompletion({
 body,
 streamProtocol = "data",
 fetch: fetch2,
-onResponse,
 onFinish,
 onError,
 experimental_throttle: throttleWaitMs
 } = {}) {
-const hookId = (0,
+const hookId = (0, import_react2.useId)();
 const completionId = id || hookId;
-const { data, mutate } = (0,
+const { data, mutate } = (0, import_swr.default)([api, completionId], null, {
 fallbackData: initialCompletion
 });
-const { data: isLoading = false, mutate: mutateLoading } = (0,
+const { data: isLoading = false, mutate: mutateLoading } = (0, import_swr.default)(
 [completionId, "loading"],
 null
 );
-const [error, setError] = (0,
+const [error, setError] = (0, import_react2.useState)(void 0);
 const completion = data;
-const [abortController, setAbortController] = (0,
-const extraMetadataRef = (0,
+const [abortController, setAbortController] = (0, import_react2.useState)(null);
+const extraMetadataRef = (0, import_react2.useRef)({
 credentials,
 headers,
 body
 });
-(0,
+(0, import_react2.useEffect)(() => {
 extraMetadataRef.current = {
 credentials,
 headers,
 body
 };
 }, [credentials, headers, body]);
-const triggerRequest = (0,
-async (prompt, options) => (0,
+const triggerRequest = (0, import_react2.useCallback)(
+async (prompt, options) => (0, import_ai2.callCompletionApi)({
 api,
 prompt,
 credentials: extraMetadataRef.current.credentials,
@@ -410,7 +269,6 @@ function useCompletion({
 setLoading: mutateLoading,
 setError,
 setAbortController,
-onResponse,
 onFinish,
 onError
 }),
@@ -420,7 +278,6 @@ function useCompletion({
 api,
 extraMetadataRef,
 setAbortController,
-onResponse,
 onFinish,
 onError,
 setError,
@@ -429,26 +286,26 @@ function useCompletion({
 throttleWaitMs
 ]
 );
-const stop = (0,
+const stop = (0, import_react2.useCallback)(() => {
 if (abortController) {
 abortController.abort();
 setAbortController(null);
 }
 }, [abortController]);
-const setCompletion = (0,
+const setCompletion = (0, import_react2.useCallback)(
 (completion2) => {
 mutate(completion2, false);
 },
 [mutate]
 );
-const complete = (0,
+const complete = (0, import_react2.useCallback)(
 async (prompt, options) => {
 return triggerRequest(prompt, options);
 },
 [triggerRequest]
 );
-const [input, setInput] = (0,
-const handleSubmit = (0,
+const [input, setInput] = (0, import_react2.useState)(initialInput);
+const handleSubmit = (0, import_react2.useCallback)(
 (event) => {
 var _a;
 (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
@@ -456,7 +313,7 @@ function useCompletion({
 },
 [input, complete]
 );
-const handleInputChange = (0,
+const handleInputChange = (0, import_react2.useCallback)(
 (e) => {
 setInput(e.target.value);
 },
@@ -478,9 +335,9 @@ function useCompletion({
 
 // src/use-object.ts
 var import_provider_utils = require("@ai-sdk/provider-utils");
-var
-var
-var
+var import_ai3 = require("ai");
+var import_react3 = require("react");
+var import_swr2 = __toESM(require("swr"));
 var getOriginalFetch = () => fetch;
 function useObject({
 api,
@@ -494,17 +351,17 @@ function useObject({
 headers,
 credentials
 }) {
-const hookId = (0,
+const hookId = (0, import_react3.useId)();
 const completionId = id != null ? id : hookId;
-const { data, mutate } = (0,
+const { data, mutate } = (0, import_swr2.default)(
 [api, completionId],
 null,
 { fallbackData: initialValue }
 );
-const [error, setError] = (0,
-const [isLoading, setIsLoading] = (0,
-const abortControllerRef = (0,
-const stop = (0,
+const [error, setError] = (0, import_react3.useState)(void 0);
+const [isLoading, setIsLoading] = (0, import_react3.useState)(false);
+const abortControllerRef = (0, import_react3.useRef)(null);
+const stop = (0, import_react3.useCallback)(() => {
 var _a;
 try {
 (_a = abortControllerRef.current) == null ? void 0 : _a.abort();
@@ -547,9 +404,9 @@ function useObject({
 new WritableStream({
 async write(chunk) {
 accumulatedText += chunk;
-const { value } = await (0,
+const { value } = await (0, import_ai3.parsePartialJson)(accumulatedText);
 const currentObject = value;
-if (!(0,
+if (!(0, import_ai3.isDeepEqualData)(latestObject, currentObject)) {
 latestObject = currentObject;
 mutate(currentObject);
 }
@@ -560,7 +417,7 @@ function useObject({
 if (onFinish != null) {
 const validationResult = await (0, import_provider_utils.safeValidateTypes)({
 value: latestObject,
-schema: (0,
+schema: (0, import_ai3.asSchema)(schema)
 });
 onFinish(
 validationResult.success ? { object: validationResult.value, error: void 0 } : { object: void 0, error: validationResult.error }