ai 2.0.1 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +48 -8
- package/dist/index.mjs +234 -22
- package/package.json +20 -8
- package/react/dist/index.d.ts +206 -3
- package/react/dist/index.mjs +384 -7
- package/svelte/dist/index.d.ts +194 -4
- package/svelte/dist/index.mjs +779 -7
- package/{react/dist/types-f862f74a.d.ts → vue/dist/index.d.ts} +72 -1
- package/vue/dist/index.js +384 -0
- package/vue/dist/index.mjs +349 -0
- package/dist/ai-stream.d.ts +0 -18
- package/dist/ai-stream.js +0 -132
- package/dist/ai-stream.mjs +0 -13
- package/dist/anthropic-stream.d.ts +0 -5
- package/dist/anthropic-stream.js +0 -133
- package/dist/anthropic-stream.mjs +0 -8
- package/dist/chunk-265FSSO4.mjs +0 -91
- package/dist/chunk-2L3ZO4UM.mjs +0 -45
- package/dist/chunk-GT4HKF2X.mjs +0 -33
- package/dist/chunk-JGDC3BXD.mjs +0 -22
- package/dist/chunk-NK2CVBLI.mjs +0 -38
- package/dist/chunk-PEYAHBDF.mjs +0 -43
- package/dist/chunk-TJMME6CL.mjs +0 -24
- package/dist/huggingface-stream.d.ts +0 -5
- package/dist/huggingface-stream.js +0 -121
- package/dist/huggingface-stream.mjs +0 -8
- package/dist/index.test.d.ts +0 -2
- package/dist/index.test.js +0 -12
- package/dist/index.test.mjs +0 -10
- package/dist/langchain-stream.d.ts +0 -12
- package/dist/langchain-stream.js +0 -102
- package/dist/langchain-stream.mjs +0 -8
- package/dist/openai-stream.d.ts +0 -5
- package/dist/openai-stream.js +0 -144
- package/dist/openai-stream.mjs +0 -8
- package/dist/streaming-text-response.d.ts +0 -17
- package/dist/streaming-text-response.js +0 -75
- package/dist/streaming-text-response.mjs +0 -9
- package/react/dist/chunk-5PP6W52J.mjs +0 -202
- package/react/dist/chunk-6EH3SWMP.mjs +0 -55
- package/react/dist/chunk-PW6HSU2N.mjs +0 -154
- package/react/dist/use-chat.d.ts +0 -42
- package/react/dist/use-chat.js +0 -276
- package/react/dist/use-chat.mjs +0 -8
- package/react/dist/use-completion.d.ts +0 -47
- package/react/dist/use-completion.js +0 -229
- package/react/dist/use-completion.mjs +0 -8
- package/svelte/dist/chunk-6USBQIV6.mjs +0 -177
- package/svelte/dist/chunk-BQ64GHZ3.mjs +0 -136
- package/svelte/dist/chunk-CENOSGDG.mjs +0 -493
- package/svelte/dist/types-f862f74a.d.ts +0 -123
- package/svelte/dist/use-chat.d.ts +0 -39
- package/svelte/dist/use-chat.js +0 -680
- package/svelte/dist/use-chat.mjs +0 -7
- package/svelte/dist/use-completion.d.ts +0 -38
- package/svelte/dist/use-completion.js +0 -640
- package/svelte/dist/use-completion.mjs +0 -7
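Reading the file list: the per-helper builds under dist/ (ai-stream, openai-stream, anthropic-stream, huggingface-stream, langchain-stream, streaming-text-response) and the per-hook builds under react/ and svelte/ are removed in favor of a single bundled entry per target, and a new vue/ entry is added. Below is a minimal server-route sketch of how the streaming helpers are typically consumed after this consolidation; it assumes `OpenAIStream` and `StreamingTextResponse` remain exported from the package root, and the route path, upstream URL, model name, and environment variable are illustrative, not taken from this diff.

```ts
// app/api/chat/route.ts (illustrative path) — a hedged sketch, not part of the diff.
import { OpenAIStream, StreamingTextResponse } from 'ai';

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Forward the chat history to OpenAI with streaming enabled.
  // The endpoint URL, model, and env var name are assumptions for illustration.
  const upstream = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify({ model: 'gpt-3.5-turbo', stream: true, messages }),
  });

  // OpenAIStream parses the upstream SSE response; StreamingTextResponse
  // streams the tokens back to useChat/useCompletion on the client.
  return new StreamingTextResponse(OpenAIStream(upstream));
}
```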
package/react/dist/index.mjs
CHANGED
@@ -1,11 +1,388 @@
 'use client'
-
-
-
-
-
-
-
+var __defProp = Object.defineProperty;
+var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __propIsEnum = Object.prototype.propertyIsEnumerable;
+var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues = (a, b) => {
+  for (var prop in b || (b = {}))
+    if (__hasOwnProp.call(b, prop))
+      __defNormalProp(a, prop, b[prop]);
+  if (__getOwnPropSymbols)
+    for (var prop of __getOwnPropSymbols(b)) {
+      if (__propIsEnum.call(b, prop))
+        __defNormalProp(a, prop, b[prop]);
+    }
+  return a;
+};
+var __async = (__this, __arguments, generator) => {
+  return new Promise((resolve, reject) => {
+    var fulfilled = (value) => {
+      try {
+        step(generator.next(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var rejected = (value) => {
+      try {
+        step(generator.throw(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+    step((generator = generator.apply(__this, __arguments)).next());
+  });
+};
+
+// react/use-chat.ts
+import { useCallback, useId, useRef, useEffect, useState } from "react";
+import useSWRMutation from "swr/mutation";
+import useSWR from "swr";
+
+// shared/utils.ts
+import { customAlphabet } from "nanoid";
+var nanoid = customAlphabet(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+var decoder = new TextDecoder();
+function decodeAIStreamChunk(chunk) {
+  return decoder.decode(chunk);
+}
+
+// react/use-chat.ts
+function useChat({
+  api = "/api/chat",
+  id,
+  initialMessages = [],
+  initialInput = "",
+  sendExtraMessageFields,
+  onResponse,
+  onFinish,
+  onError,
+  headers,
+  body
+} = {}) {
+  const hookId = useId();
+  const chatId = id || hookId;
+  const { data, mutate } = useSWR([api, chatId], null, {
+    fallbackData: initialMessages
+  });
+  const messages = data;
+  const messagesRef = useRef(messages);
+  useEffect(() => {
+    messagesRef.current = messages;
+  }, [messages]);
+  const abortControllerRef = useRef(null);
+  const extraMetadataRef = useRef({
+    headers,
+    body
+  });
+  useEffect(() => {
+    extraMetadataRef.current = {
+      headers,
+      body
+    };
+  }, [headers, body]);
+  const { error, trigger, isMutating } = useSWRMutation(
+    [api, chatId],
+    (_0, _1) => __async(this, [_0, _1], function* (_, { arg: messagesSnapshot }) {
+      try {
+        const abortController = new AbortController();
+        abortControllerRef.current = abortController;
+        const previousMessages = messagesRef.current;
+        mutate(messagesSnapshot, false);
+        const res = yield fetch(api, {
+          method: "POST",
+          body: JSON.stringify(__spreadValues({
+            messages: sendExtraMessageFields ? messagesSnapshot : messagesSnapshot.map(({ role, content }) => ({
+              role,
+              content
+            }))
+          }, extraMetadataRef.current.body)),
+          headers: extraMetadataRef.current.headers || {},
+          signal: abortController.signal
+        }).catch((err) => {
+          mutate(previousMessages, false);
+          throw err;
+        });
+        if (onResponse) {
+          try {
+            yield onResponse(res);
+          } catch (err) {
+            throw err;
+          }
+        }
+        if (!res.ok) {
+          mutate(previousMessages, false);
+          throw new Error(
+            (yield res.text()) || "Failed to fetch the chat response."
+          );
+        }
+        if (!res.body) {
+          throw new Error("The response body is empty.");
+        }
+        let result = "";
+        const createdAt = /* @__PURE__ */ new Date();
+        const replyId = nanoid();
+        const reader = res.body.getReader();
+        while (true) {
+          const { done, value } = yield reader.read();
+          if (done) {
+            break;
+          }
+          result += decodeAIStreamChunk(value);
+          mutate(
+            [
+              ...messagesSnapshot,
+              {
+                id: replyId,
+                createdAt,
+                content: result,
+                role: "assistant"
+              }
+            ],
+            false
+          );
+          if (abortControllerRef.current === null) {
+            reader.cancel();
+            break;
+          }
+        }
+        if (onFinish) {
+          onFinish({
+            id: replyId,
+            createdAt,
+            content: result,
+            role: "assistant"
+          });
+        }
+        abortControllerRef.current = null;
+        return result;
+      } catch (err) {
+        if (err.name === "AbortError") {
+          abortControllerRef.current = null;
+          return null;
+        }
+        if (onError && err instanceof Error) {
+          onError(err);
+        }
+        throw err;
+      }
+    }),
+    {
+      populateCache: false,
+      revalidate: false
+    }
+  );
+  const append = useCallback(
+    (message) => __async(this, null, function* () {
+      if (!message.id) {
+        message.id = nanoid();
+      }
+      return trigger(messagesRef.current.concat(message));
+    }),
+    [trigger]
+  );
+  const reload = useCallback(() => __async(this, null, function* () {
+    if (messagesRef.current.length === 0)
+      return null;
+    const lastMessage = messagesRef.current[messagesRef.current.length - 1];
+    if (lastMessage.role === "assistant") {
+      return trigger(messagesRef.current.slice(0, -1));
+    }
+    return trigger(messagesRef.current);
+  }), [trigger]);
+  const stop = useCallback(() => {
+    if (abortControllerRef.current) {
+      abortControllerRef.current.abort();
+      abortControllerRef.current = null;
+    }
+  }, []);
+  const setMessages = useCallback(
+    (messages2) => {
+      mutate(messages2, false);
+      messagesRef.current = messages2;
+    },
+    [mutate]
+  );
+  const [input, setInput] = useState(initialInput);
+  const handleSubmit = useCallback(
+    (e) => {
+      e.preventDefault();
+      if (!input)
+        return;
+      append({
+        content: input,
+        role: "user"
+      });
+      setInput("");
+    },
+    [input, append]
+  );
+  const handleInputChange = (e) => {
+    setInput(e.target.value);
+  };
+  return {
+    messages,
+    error,
+    append,
+    reload,
+    stop,
+    setMessages,
+    input,
+    setInput,
+    handleInputChange,
+    handleSubmit,
+    isLoading: isMutating
+  };
+}
+
+// react/use-completion.ts
+import { useCallback as useCallback2, useEffect as useEffect2, useId as useId2, useRef as useRef2, useState as useState2 } from "react";
+import useSWRMutation2 from "swr/mutation";
+import useSWR2 from "swr";
+function useCompletion({
+  api = "/api/completion",
+  id,
+  initialCompletion = "",
+  initialInput = "",
+  headers,
+  body,
+  onResponse,
+  onFinish,
+  onError
+} = {}) {
+  const hookId = useId2();
+  const completionId = id || hookId;
+  const { data, mutate } = useSWR2([api, completionId], null, {
+    fallbackData: initialCompletion
+  });
+  const completion = data;
+  const [abortController, setAbortController] = useState2(null);
+  const extraMetadataRef = useRef2({
+    headers,
+    body
+  });
+  useEffect2(() => {
+    extraMetadataRef.current = {
+      headers,
+      body
+    };
+  }, [headers, body]);
+  const { error, trigger, isMutating } = useSWRMutation2(
+    [api, completionId],
+    (_0, _1) => __async(this, [_0, _1], function* (_, { arg: prompt }) {
+      try {
+        const abortController2 = new AbortController();
+        setAbortController(abortController2);
+        mutate("", false);
+        const res = yield fetch(api, {
+          method: "POST",
+          body: JSON.stringify(__spreadValues({
+            prompt
+          }, extraMetadataRef.current.body)),
+          headers: extraMetadataRef.current.headers || {},
+          signal: abortController2.signal
+        }).catch((err) => {
+          throw err;
+        });
+        if (onResponse) {
+          try {
+            yield onResponse(res);
+          } catch (err) {
+            throw err;
+          }
+        }
+        if (!res.ok) {
+          throw new Error(
+            (yield res.text()) || "Failed to fetch the chat response."
+          );
+        }
+        if (!res.body) {
+          throw new Error("The response body is empty.");
+        }
+        let result = "";
+        const reader = res.body.getReader();
+        while (true) {
+          const { done, value } = yield reader.read();
+          if (done) {
+            break;
+          }
+          result += decodeAIStreamChunk(value);
+          mutate(result, false);
+          if (abortController2 === null) {
+            reader.cancel();
+            break;
+          }
+        }
+        if (onFinish) {
+          onFinish(prompt, result);
+        }
+        setAbortController(null);
+        return result;
+      } catch (err) {
+        if (err.name === "AbortError") {
+          setAbortController(null);
+          return null;
+        }
+        if (onError && err instanceof Error) {
+          onError(err);
+        }
+        throw err;
+      }
+    }),
+    {
+      populateCache: false,
+      revalidate: false
+    }
+  );
+  const stop = useCallback2(() => {
+    if (abortController) {
+      abortController.abort();
+      setAbortController(null);
+    }
+  }, [abortController]);
+  const setCompletion = useCallback2(
+    (completion2) => {
+      mutate(completion2, false);
+    },
+    [mutate]
+  );
+  const [input, setInput] = useState2(initialInput);
+  const handleSubmit = useCallback2(
+    (e) => {
+      e.preventDefault();
+      if (!input)
+        return;
+      return trigger(input);
+    },
+    [input, trigger]
+  );
+  const handleInputChange = (e) => {
+    setInput(e.target.value);
+  };
+  const complete = useCallback2(
+    (prompt) => __async(this, null, function* () {
+      return trigger(prompt);
+    }),
+    [trigger]
+  );
+  return {
+    completion,
+    complete,
+    error,
+    setCompletion,
+    stop,
+    input,
+    setInput,
+    handleInputChange,
+    handleSubmit,
+    isLoading: isMutating
+  };
+}
 export {
   useChat,
   useCompletion
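The bundle above inlines react/use-chat.ts, react/use-completion.ts, and the shared nanoid/TextDecoder utilities, so the React entry no longer re-exports separate use-chat and use-completion chunk files. Based on the return shape visible in the compiled useChat (messages, error, append, reload, stop, setMessages, input, setInput, handleInputChange, handleSubmit, isLoading), here is a minimal consumer sketch, assuming this file is exposed as the `ai/react` subpath; the component markup and onError handler are illustrative:

```tsx
'use client';
// Minimal chat component wired to the return values of the compiled hook
// above. /api/chat is the hook's default endpoint; everything else here
// (markup, onError handler) is an illustrative assumption.
import { useChat } from 'ai/react';

export default function Chat() {
  const { messages, input, handleInputChange, handleSubmit, isLoading, stop } =
    useChat({ onError: (err) => console.error(err) });

  return (
    <form onSubmit={handleSubmit}>
      {messages.map((m) => (
        <p key={m.id}>
          {m.role}: {m.content}
        </p>
      ))}
      <input value={input} onChange={handleInputChange} placeholder="Say something..." />
      <button type="submit" disabled={isLoading}>
        Send
      </button>
      <button type="button" onClick={stop}>
        Stop
      </button>
    </form>
  );
}
```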
package/svelte/dist/index.d.ts
CHANGED
@@ -1,4 +1,194 @@
-
-
-
-
+import { Readable, Writable } from 'svelte/store';
+
+/**
+ * Shared types between the API and UI packages.
+ */
+type Message = {
+    id: string;
+    createdAt?: Date;
+    content: string;
+    role: 'system' | 'user' | 'assistant';
+};
+type CreateMessage = {
+    id?: string;
+    createdAt?: Date;
+    content: string;
+    role: 'system' | 'user' | 'assistant';
+};
+type UseChatOptions = {
+    /**
+     * The API endpoint that accepts a `{ messages: Message[] }` object and returns
+     * a stream of tokens of the AI chat response. Defaults to `/api/chat`.
+     */
+    api?: string;
+    /**
+     * An unique identifier for the chat. If not provided, a random one will be
+     * generated. When provided, the `useChat` hook with the same `id` will
+     * have shared states across components.
+     */
+    id?: string;
+    /**
+     * Initial messages of the chat. Useful to load an existing chat history.
+     */
+    initialMessages?: Message[];
+    /**
+     * Initial input of the chat.
+     */
+    initialInput?: string;
+    /**
+     * Callback function to be called when the API response is received.
+     */
+    onResponse?: (response: Response) => void;
+    /**
+     * Callback function to be called when the chat is finished streaming.
+     */
+    onFinish?: (message: Message) => void;
+    /**
+     * Callback function to be called when an error is encountered.
+     */
+    onError?: (error: Error) => void;
+    /**
+     * HTTP headers to be sent with the API request.
+     */
+    headers?: Record<string, string> | Headers;
+    /**
+     * Extra body object to be sent with the API request.
+     * @example
+     * Send a `sessionId` to the API along with the messages.
+     * ```js
+     * useChat({
+     *   body: {
+     *     sessionId: '123',
+     *   }
+     * })
+     * ```
+     */
+    body?: object;
+    /**
+     * Whether to send extra message fields such as `message.id` and `message.createdAt` to the API.
+     * Defaults to `false`. When set to `true`, the API endpoint might need to
+     * handle the extra fields before forwarding the request to the AI service.
+     */
+    sendExtraMessageFields?: boolean;
+};
+type UseCompletionOptions = {
+    /**
+     * The API endpoint that accepts a `{ prompt: string }` object and returns
+     * a stream of tokens of the AI completion response. Defaults to `/api/completion`.
+     */
+    api?: string;
+    /**
+     * An unique identifier for the chat. If not provided, a random one will be
+     * generated. When provided, the `useChat` hook with the same `id` will
+     * have shared states across components.
+     */
+    id?: string;
+    /**
+     * Initial prompt input of the completion.
+     */
+    initialInput?: string;
+    /**
+     * Initial completion result. Useful to load an existing history.
+     */
+    initialCompletion?: string;
+    /**
+     * Callback function to be called when the API response is received.
+     */
+    onResponse?: (response: Response) => void;
+    /**
+     * Callback function to be called when the completion is finished streaming.
+     */
+    onFinish?: (prompt: string, completion: string) => void;
+    /**
+     * Callback function to be called when an error is encountered.
+     */
+    onError?: (error: Error) => void;
+    /**
+     * HTTP headers to be sent with the API request.
+     */
+    headers?: Record<string, string> | Headers;
+    /**
+     * Extra body object to be sent with the API request.
+     * @example
+     * Send a `sessionId` to the API along with the prompt.
+     * ```js
+     * useChat({
+     *   body: {
+     *     sessionId: '123',
+     *   }
+     * })
+     * ```
+     */
+    body?: object;
+};
+
+type UseChatHelpers = {
+    /** Current messages in the chat */
+    messages: Readable<Message[]>;
+    /** The error object of the API request */
+    error: Readable<undefined | Error>;
+    /**
+     * Append a user message to the chat list. This triggers the API call to fetch
+     * the assistant's response.
+     */
+    append: (message: Message | CreateMessage) => Promise<string | null | undefined>;
+    /**
+     * Reload the last AI chat response for the given chat history. If the last
+     * message isn't from the assistant, it will request the API to generate a
+     * new response.
+     */
+    reload: () => Promise<string | null | undefined>;
+    /**
+     * Abort the current request immediately, keep the generated tokens if any.
+     */
+    stop: () => void;
+    /**
+     * Update the `messages` state locally. This is useful when you want to
+     * edit the messages on the client, and then trigger the `reload` method
+     * manually to regenerate the AI response.
+     */
+    setMessages: (messages: Message[]) => void;
+    /** The current value of the input */
+    input: Writable<string>;
+    /** Form submission handler to automattically reset input and append a user message */
+    handleSubmit: (e: any) => void;
+    /** Whether the API request is in progress */
+    isLoading: Writable<boolean>;
+};
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, headers, body }?: UseChatOptions): UseChatHelpers;
+
+type UseCompletionHelpers = {
+    /** The current completion result */
+    completion: Readable<string>;
+    /** The error object of the API request */
+    error: Readable<undefined | Error>;
+    /**
+     * Send a new prompt to the API endpoint and update the completion state.
+     */
+    complete: (prompt: string) => Promise<string | null | undefined>;
+    /**
+     * Abort the current API request but keep the generated tokens.
+     */
+    stop: () => void;
+    /**
+     * Update the `completion` state locally.
+     */
+    setCompletion: (completion: string) => void;
+    /** The current value of the input */
+    input: Writable<string>;
+    /**
+     * Form submission handler to automattically reset input and append a user message
+     * @example
+     * ```jsx
+     * <form onSubmit={handleSubmit}>
+     *   <input onChange={handleInputChange} value={input} />
+     * </form>
+     * ```
+     */
+    handleSubmit: (e: any) => void;
+    /** Whether the API request is in progress */
+    isLoading: Writable<boolean>;
+};
+declare function useCompletion({ api, id, initialCompletion, initialInput, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+
+export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
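Unlike the React entry, these declarations return Svelte stores: messages, error, and completion are Readable, while input and isLoading are Writable. A minimal sketch of consuming UseChatHelpers as declared above, written as it would sit inside a Svelte component's script block and assuming the module is exposed as the `ai/svelte` subpath; the endpoint, onFinish logging, and store reads are illustrative:

```ts
// Illustrative consumption of the store-based helpers declared above; the
// api value and logging are assumptions, not part of the diff.
import { get } from 'svelte/store';
import { useChat } from 'ai/svelte';

const { messages, input, isLoading } = useChat({
  api: '/api/chat',
  onFinish: (message) => console.log('assistant said:', message.content),
});

// messages and isLoading are stores, so read them imperatively with get()
// or with the $-prefix auto-subscription in markup; input is Writable<string>.
// handleSubmit (also returned, see UseChatHelpers) is meant for
// <form on:submit={handleSubmit}> in the component markup.
input.set('Hello!');
console.log(get(messages).length, get(isLoading));
```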