ai 3.0.4 → 3.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,475 @@
+ interface FunctionCall {
+     /**
+      * The arguments to call the function with, as generated by the model in JSON
+      * format. Note that the model does not always generate valid JSON, and may
+      * hallucinate parameters not defined by your function schema. Validate the
+      * arguments in your code before calling your function.
+      */
+     arguments?: string;
+     /**
+      * The name of the function to call.
+      */
+     name?: string;
+ }
+ /**
+  * The tool calls generated by the model, such as function calls.
+  */
+ interface ToolCall {
+     id: string;
+     type: string;
+     function: {
+         name: string;
+         arguments: string;
+     };
+ }
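
As the comments above note, the model can emit malformed JSON or invent parameters, so `FunctionCall`/`ToolCall` arguments are worth parsing and validating before anything is invoked. A minimal sketch; the `getWeather` tool name and its `city` argument are hypothetical:

```ts
// Parse and validate a ToolCall's JSON arguments before acting on them.
function handleToolCall(toolCall: ToolCall): string {
  let args: unknown;
  try {
    args = JSON.parse(toolCall.function.arguments);
  } catch {
    return `Invalid JSON arguments for ${toolCall.function.name}`;
  }
  // Narrow to the shape this (hypothetical) tool expects.
  if (
    toolCall.function.name === 'getWeather' &&
    typeof args === 'object' &&
    args !== null &&
    typeof (args as { city?: unknown }).city === 'string'
  ) {
    return `Looking up weather for ${(args as { city: string }).city}`;
  }
  return `Unsupported or malformed tool call: ${toolCall.function.name}`;
}
```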
+ /**
+  * Controls which (if any) function is called by the model.
+  * - none means the model will not call a function and instead generates a message.
+  * - auto means the model can pick between generating a message or calling a function.
+  * - Specifying a particular function via {"type": "function", "function": {"name": "my_function"}} forces the model to call that function.
+  * none is the default when no functions are present. auto is the default if functions are present.
+  */
+ type ToolChoice = 'none' | 'auto' | {
+     type: 'function';
+     function: {
+         name: string;
+     };
+ };
+ /**
+  * A list of tools the model may call. Currently, only functions are supported as a tool.
+  * Use this to provide a list of functions the model may generate JSON inputs for.
+  */
+ interface Tool {
+     type: 'function';
+     function: Function;
+ }
+ interface Function {
+     /**
+      * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
+      * underscores and dashes, with a maximum length of 64.
+      */
+     name: string;
+     /**
+      * The parameters the function accepts, described as a JSON Schema object. See the
+      * [guide](/docs/guides/gpt/function-calling) for examples, and the
+      * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
+      * documentation about the format.
+      *
+      * To describe a function that accepts no parameters, provide the value
+      * `{"type": "object", "properties": {}}`.
+      */
+     parameters: Record<string, unknown>;
+     /**
+      * A description of what the function does, used by the model to choose when and
+      * how to call the function.
+      */
+     description?: string;
+ }
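
Taken together, a `Tool` wraps a `Function` whose `parameters` field is a JSON Schema object, and `ToolChoice` can force that specific tool. A sketch with a hypothetical `get_current_weather` function:

```ts
// A hypothetical weather tool described with a JSON Schema parameters object.
const weatherTool: Tool = {
  type: 'function',
  function: {
    name: 'get_current_weather',
    description: 'Get the current weather for a given city.',
    parameters: {
      type: 'object',
      properties: {
        city: { type: 'string', description: 'City name, e.g. Berlin' },
        unit: { type: 'string', enum: ['celsius', 'fahrenheit'] },
      },
      required: ['city'],
    },
  },
};

// Force the model to call that tool instead of replying with plain text.
const forcedChoice: ToolChoice = {
  type: 'function',
  function: { name: 'get_current_weather' },
};
```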
+ type IdGenerator = () => string;
+ /**
+  * Shared types between the API and UI packages.
+  */
+ interface Message {
+     id: string;
+     tool_call_id?: string;
+     createdAt?: Date;
+     content: string;
+     ui?: string | JSX.Element | JSX.Element[] | null | undefined;
+     role: 'system' | 'user' | 'assistant' | 'function' | 'data' | 'tool';
+     /**
+      * If the message has a role of `function`, the `name` field is the name of the function.
+      * Otherwise, the name field should not be set.
+      */
+     name?: string;
+     /**
+      * If the assistant role makes a function call, the `function_call` field
+      * contains the function call name and arguments. Otherwise, the field should
+      * not be set. (Deprecated and replaced by tool_calls.)
+      */
+     function_call?: string | FunctionCall;
+     data?: JSONValue;
+     /**
+      * If the assistant role makes a tool call, the `tool_calls` field contains
+      * the tool call name and arguments. Otherwise, the field should not be set.
+      */
+     tool_calls?: string | ToolCall[];
+     /**
+      * Additional message-specific information added on the server via StreamData
+      */
+     annotations?: JSONValue[] | undefined;
+ }
+ type CreateMessage = Omit<Message, 'id'> & {
+     id?: Message['id'];
+ };
+ type ChatRequest = {
+     messages: Message[];
+     options?: RequestOptions;
+     functions?: Array<Function>;
+     function_call?: FunctionCall;
+     data?: Record<string, string>;
+     tools?: Array<Tool>;
+     tool_choice?: ToolChoice;
+ };
+ type FunctionCallHandler = (chatMessages: Message[], functionCall: FunctionCall) => Promise<ChatRequest | void>;
+ type ToolCallHandler = (chatMessages: Message[], toolCalls: ToolCall[]) => Promise<ChatRequest | void>;
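
A `FunctionCallHandler` (passed to `useChat` as `experimental_onFunctionCall` below) can execute the call locally, append the result as a `function` message, and return a new `ChatRequest` so the follow-up request is sent automatically. A sketch reusing the hypothetical `get_current_weather` function; the stubbed result and `crypto.randomUUID()` id are only for illustration:

```ts
const onFunctionCall: FunctionCallHandler = async (chatMessages, functionCall) => {
  if (functionCall.name !== 'get_current_weather') {
    return; // ignore calls this handler does not know about
  }
  // Model-generated JSON; validate more defensively in real code (see the earlier sketch).
  const args = functionCall.arguments ? JSON.parse(functionCall.arguments) : {};
  const result = { city: args.city, temperature: 21, unit: 'celsius' }; // stubbed lookup

  const followUp: ChatRequest = {
    messages: [
      ...chatMessages,
      {
        id: crypto.randomUUID(),
        role: 'function',
        name: 'get_current_weather',
        content: JSON.stringify(result),
      },
    ],
  };
  return followUp; // returning a ChatRequest resends the conversation automatically
};
```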
+ type RequestOptions = {
+     headers?: Record<string, string> | Headers;
+     body?: object;
+ };
+ type ChatRequestOptions = {
+     options?: RequestOptions;
+     functions?: Array<Function>;
+     function_call?: FunctionCall;
+     tools?: Array<Tool>;
+     tool_choice?: ToolChoice;
+     data?: Record<string, string>;
+ };
+ type UseChatOptions = {
+     /**
+      * The API endpoint that accepts a `{ messages: Message[] }` object and returns
+      * a stream of tokens of the AI chat response. Defaults to `/api/chat`.
+      */
+     api?: string;
+     /**
+      * A unique identifier for the chat. If not provided, a random one will be
+      * generated. When provided, the `useChat` hook with the same `id` will
+      * have shared states across components.
+      */
+     id?: string;
+     /**
+      * Initial messages of the chat. Useful to load an existing chat history.
+      */
+     initialMessages?: Message[];
+     /**
+      * Initial input of the chat.
+      */
+     initialInput?: string;
+     /**
+      * Callback function to be called when a function call is received.
+      * If the function returns a `ChatRequest` object, the request will be sent
+      * automatically to the API and will be used to update the chat.
+      */
+     experimental_onFunctionCall?: FunctionCallHandler;
+     /**
+      * Callback function to be called when a tool call is received.
+      * If the function returns a `ChatRequest` object, the request will be sent
+      * automatically to the API and will be used to update the chat.
+      */
+     experimental_onToolCall?: ToolCallHandler;
+     /**
+      * Callback function to be called when the API response is received.
+      */
+     onResponse?: (response: Response) => void | Promise<void>;
+     /**
+      * Callback function to be called when the chat is finished streaming.
+      */
+     onFinish?: (message: Message) => void;
+     /**
+      * Callback function to be called when an error is encountered.
+      */
+     onError?: (error: Error) => void;
+     /**
+      * A custom function used to generate message IDs.
+      * If not provided, `nanoid` is used by default.
+      */
+     generateId?: IdGenerator;
+     /**
+      * The credentials mode to be used for the fetch request.
+      * Possible values are: 'omit', 'same-origin', 'include'.
+      * Defaults to 'same-origin'.
+      */
+     credentials?: RequestCredentials;
+     /**
+      * HTTP headers to be sent with the API request.
+      */
+     headers?: Record<string, string> | Headers;
+     /**
+      * Extra body object to be sent with the API request.
+      * @example
+      * Send a `sessionId` to the API along with the messages.
+      * ```js
+      * useChat({
+      *   body: {
+      *     sessionId: '123',
+      *   }
+      * })
+      * ```
+      */
+     body?: object;
+     /**
+      * Whether to send extra message fields such as `message.id` and `message.createdAt` to the API.
+      * Defaults to `false`. When set to `true`, the API endpoint might need to
+      * handle the extra fields before forwarding the request to the AI service.
+      */
+     sendExtraMessageFields?: boolean;
+ };
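
A typical way these options come together on the client, with the hook imported from `ai/react`; the `/api/chat` route, `sessionId`, and custom header are assumptions for illustration:

```tsx
import { useChat } from 'ai/react';

// Sketch: configure useChat with a custom endpoint, extra request fields,
// and completion/error callbacks.
export function ConfiguredChat() {
  const { messages, isLoading } = useChat({
    api: '/api/chat',                        // the documented default, shown explicitly
    headers: { 'X-Client': 'docs-example' }, // hypothetical header
    body: { sessionId: '123' },              // merged into every request body
    sendExtraMessageFields: true,            // also send message.id / message.createdAt
    onFinish: (message) => console.log('assistant finished:', message.content),
    onError: (error) => console.error('chat request failed:', error),
  });

  return <pre>{isLoading ? 'streaming…' : JSON.stringify(messages, null, 2)}</pre>;
}
```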
+ type UseCompletionOptions = {
+     /**
+      * The API endpoint that accepts a `{ prompt: string }` object and returns
+      * a stream of tokens of the AI completion response. Defaults to `/api/completion`.
+      */
+     api?: string;
+     /**
+      * A unique identifier for the completion. If not provided, a random one will be
+      * generated. When provided, the `useCompletion` hook with the same `id` will
+      * have shared states across components.
+      */
+     id?: string;
+     /**
+      * Initial prompt input of the completion.
+      */
+     initialInput?: string;
+     /**
+      * Initial completion result. Useful to load an existing history.
+      */
+     initialCompletion?: string;
+     /**
+      * Callback function to be called when the API response is received.
+      */
+     onResponse?: (response: Response) => void | Promise<void>;
+     /**
+      * Callback function to be called when the completion is finished streaming.
+      */
+     onFinish?: (prompt: string, completion: string) => void;
+     /**
+      * Callback function to be called when an error is encountered.
+      */
+     onError?: (error: Error) => void;
+     /**
+      * The credentials mode to be used for the fetch request.
+      * Possible values are: 'omit', 'same-origin', 'include'.
+      * Defaults to 'same-origin'.
+      */
+     credentials?: RequestCredentials;
+     /**
+      * HTTP headers to be sent with the API request.
+      */
+     headers?: Record<string, string> | Headers;
+     /**
+      * Extra body object to be sent with the API request.
+      * @example
+      * Send a `sessionId` to the API along with the prompt.
+      * ```js
+      * useCompletion({
+      *   body: {
+      *     sessionId: '123',
+      *   }
+      * })
+      * ```
+      */
+     body?: object;
+ };
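
The completion options mirror the chat options but operate on a single prompt/completion pair; a shared `id` lets several components observe the same state. A sketch, again assuming the `ai/react` entry point and illustrative values:

```tsx
import { useCompletion } from 'ai/react';

// Sketch: drive a completion programmatically via `complete` instead of the form helpers.
export function DescriptionGenerator() {
  const { complete, completion, isLoading } = useCompletion({
    id: 'product-description',   // hypothetical shared id
    body: { sessionId: '123' },  // extra fields merged into the request body
    onFinish: (prompt, text) => console.log('finished', { prompt, text }),
  });

  return (
    <div>
      <button
        disabled={isLoading}
        onClick={() => complete('Describe a reusable water bottle in one sentence.')}
      >
        Generate
      </button>
      <p>{completion}</p>
    </div>
  );
}
```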
+ type JSONValue = null | string | number | boolean | {
+     [x: string]: JSONValue;
+ } | Array<JSONValue>;
+
+ /**
+  * A stream wrapper to send custom JSON-encoded data back to the client.
+  */
+ declare class experimental_StreamData {
+     private encoder;
+     private controller;
+     stream: TransformStream<Uint8Array, Uint8Array>;
+     private isClosedPromise;
+     private isClosedPromiseResolver;
+     private isClosed;
+     private data;
+     private messageAnnotations;
+     constructor();
+     close(): Promise<void>;
+     append(value: JSONValue): void;
+     appendMessageAnnotation(value: JSONValue): void;
+ }
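
On the server, `experimental_StreamData` is created alongside the model stream, values are appended while the response is produced, and the instance is closed when streaming ends. A rough sketch; it assumes the package's main entry point, that `StreamingTextResponse` accepts the data instance as its third argument, and `textStream` stands in for a model output stream produced elsewhere:

```ts
import { experimental_StreamData, StreamingTextResponse } from 'ai';

// Sketch of a route handler that attaches extra JSON payloads to a streamed response.
export function streamWithData(textStream: ReadableStream<Uint8Array>) {
  const data = new experimental_StreamData();

  data.append({ status: 'started' });                        // surfaces client-side as `data`
  data.appendMessageAnnotation({ source: 'docs-example' });  // attached to the current message

  // Real handlers usually close the data stream from a completion callback
  // once the model stream has finished; closing here keeps the sketch short.
  void data.close();

  return new StreamingTextResponse(textStream, {}, data);
}
```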
+
+ /**
+  * This is a naive implementation of the streaming React response API.
+  * Currently, it can carry the original raw content, a data payload, and a special
+  * UI payload, and stream them via "rows" (nested promises).
+  * It must be used inside Server Actions so Flight can encode the React elements.
+  *
+  * It is naive because, unlike StreamingTextResponse, it does not send the diff
+  * between rows but flushes the full payload on each row.
+  */
+
+ type UINode = string | JSX.Element | JSX.Element[] | null | undefined;
+ /**
+  * A utility class for streaming React responses.
+  */
+ declare class experimental_StreamingReactResponse {
+     constructor(res: ReadableStream, options?: {
+         ui?: (message: {
+             content: string;
+             data?: JSONValue[] | undefined;
+         }) => UINode | Promise<UINode>;
+         data?: experimental_StreamData;
+         generateId?: IdGenerator;
+     });
+ }
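
A sketch of how the constructor above is typically used inside a Server Action: each streamed row re-renders the UI from the full accumulated content. The `ai` import path and the `modelStream` parameter (a model output stream produced elsewhere) are assumptions:

```tsx
'use server';

import { experimental_StreamingReactResponse } from 'ai';

// Sketch: map each streamed "row" to a React element via the `ui` option.
export async function chatAction(modelStream: ReadableStream) {
  return new experimental_StreamingReactResponse(modelStream, {
    ui({ content, data }) {
      // Re-rendered with the full payload on every row (no diffing).
      return (
        <div>
          <p>{content}</p>
          {data ? <pre>{JSON.stringify(data)}</pre> : null}
        </div>
      );
    },
  });
}
```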
+
+ type UseChatHelpers = {
+     /** Current messages in the chat */
+     messages: Message[];
+     /** The error object of the API request */
+     error: undefined | Error;
+     /**
+      * Append a user message to the chat list. This triggers the API call to fetch
+      * the assistant's response.
+      * @param message The message to append
+      * @param options Additional options to pass to the API call
+      */
+     append: (message: Message | CreateMessage, chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
+     /**
+      * Reload the last AI chat response for the given chat history. If the last
+      * message isn't from the assistant, it will request the API to generate a
+      * new response.
+      */
+     reload: (chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
+     /**
+      * Abort the current request immediately, keeping the generated tokens if any.
+      */
+     stop: () => void;
+     /**
+      * Update the `messages` state locally. This is useful when you want to
+      * edit the messages on the client, and then trigger the `reload` method
+      * manually to regenerate the AI response.
+      */
+     setMessages: (messages: Message[]) => void;
+     /** The current value of the input */
+     input: string;
+     /** setState-powered method to update the input value */
+     setInput: React.Dispatch<React.SetStateAction<string>>;
+     /** An input/textarea-ready onChange handler to control the value of the input */
+     handleInputChange: (e: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+     /** Form submission handler to automatically reset input and append a user message */
+     handleSubmit: (e: React.FormEvent<HTMLFormElement>, chatRequestOptions?: ChatRequestOptions) => void;
+     metadata?: Object;
+     /** Whether the API request is in progress */
+     isLoading: boolean;
+     /** Additional data added on the server via StreamData */
+     data?: JSONValue[] | undefined;
+ };
+ type StreamingReactResponseAction = (payload: {
+     messages: Message[];
+     data?: Record<string, string>;
+ }) => Promise<experimental_StreamingReactResponse>;
+ declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, experimental_onToolCall, onResponse, onFinish, onError, credentials, headers, body, generateId, }?: Omit<UseChatOptions, 'api'> & {
+     api?: string | StreamingReactResponseAction;
+     key?: string;
+ }): UseChatHelpers;
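
The helpers returned by `useChat` map directly onto a controlled form. A minimal chat component, assuming the hook is imported from `ai/react` and the default `/api/chat` route exists:

```tsx
import { useChat } from 'ai/react';

// Sketch: a minimal chat UI built from the UseChatHelpers surface.
export function Chat() {
  const { messages, input, handleInputChange, handleSubmit, stop, isLoading } = useChat();

  return (
    <div>
      {messages.map((m) => (
        <p key={m.id}>
          <strong>{m.role}:</strong> {m.content}
        </p>
      ))}
      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} placeholder="Say something…" />
        <button type="submit" disabled={isLoading}>Send</button>
        {isLoading && <button type="button" onClick={stop}>Stop</button>}
      </form>
    </div>
  );
}
```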
+
+ type UseCompletionHelpers = {
+     /** The current completion result */
+     completion: string;
+     /**
+      * Send a new prompt to the API endpoint and update the completion state.
+      */
+     complete: (prompt: string, options?: RequestOptions) => Promise<string | null | undefined>;
+     /** The error object of the API request */
+     error: undefined | Error;
+     /**
+      * Abort the current API request but keep the generated tokens.
+      */
+     stop: () => void;
+     /**
+      * Update the `completion` state locally.
+      */
+     setCompletion: (completion: string) => void;
+     /** The current value of the input */
+     input: string;
+     /** setState-powered method to update the input value */
+     setInput: React.Dispatch<React.SetStateAction<string>>;
+     /**
+      * An input/textarea-ready onChange handler to control the value of the input
+      * @example
+      * ```jsx
+      * <input onChange={handleInputChange} value={input} />
+      * ```
+      */
+     handleInputChange: (e: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+     /**
+      * Form submission handler to automatically reset input and append a user message
+      * @example
+      * ```jsx
+      * <form onSubmit={handleSubmit}>
+      *   <input onChange={handleInputChange} value={input} />
+      * </form>
+      * ```
+      */
+     handleSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
+     /** Whether the API request is in progress */
+     isLoading: boolean;
+     /** Additional data added on the server via StreamData */
+     data?: JSONValue[] | undefined;
+ };
+ declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
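
The same pattern with `useCompletion`, using the form helpers instead of calling `complete` directly; the `ai/react` import and the default `/api/completion` route are assumed:

```tsx
import { useCompletion } from 'ai/react';

// Sketch: a minimal completion UI built from the UseCompletionHelpers surface.
export function Completion() {
  const { completion, input, handleInputChange, handleSubmit, isLoading, stop } = useCompletion();

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} placeholder="Enter a prompt…" />
        <button type="submit" disabled={isLoading}>Complete</button>
      </form>
      {isLoading && <button type="button" onClick={stop}>Stop</button>}
      <p>{completion}</p>
    </div>
  );
}
```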
+
+ type AssistantStatus = 'in_progress' | 'awaiting_message';
+ type UseAssistantHelpers = {
+     /**
+      * The current array of chat messages.
+      */
+     messages: Message[];
+     /**
+      * The current thread ID.
+      */
+     threadId: string | undefined;
+     /**
+      * The current value of the input field.
+      */
+     input: string;
+     /**
+      * setState-powered method to update the input value.
+      */
+     setInput: React.Dispatch<React.SetStateAction<string>>;
+     /**
+      * Handler for the `onChange` event of the input field to control the input's value.
+      */
+     handleInputChange: (event: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+     /**
+      * Form submission handler that automatically resets the input field and appends a user message.
+      */
+     submitMessage: (event?: React.FormEvent<HTMLFormElement>, requestOptions?: {
+         data?: Record<string, string>;
+     }) => Promise<void>;
+     /**
+      * The current status of the assistant. This can be used to show a loading indicator.
+      */
+     status: AssistantStatus;
+     /**
+      * The error thrown during the assistant message processing, if any.
+      */
+     error: undefined | unknown;
+ };
+ type UseAssistantOptions = {
+     /**
+      * The API endpoint that accepts a `{ threadId: string | null; message: string; }` object and returns an `AssistantResponse` stream.
+      * The threadId refers to an existing thread with messages (or is `null` to create a new thread).
+      * The message is the next message that should be appended to the thread and sent to the assistant.
+      */
+     api: string;
+     /**
+      * An optional string that represents the ID of an existing thread.
+      * If not provided, a new thread will be created.
+      */
+     threadId?: string | undefined;
+     /**
+      * An optional literal that sets the mode of credentials to be used on the request.
+      * Defaults to "same-origin".
+      */
+     credentials?: RequestCredentials;
+     /**
+      * An optional object of headers to be passed to the API endpoint.
+      */
+     headers?: Record<string, string> | Headers;
+     /**
+      * An optional, additional body object to be passed to the API endpoint.
+      */
+     body?: object;
+     /**
+      * An optional callback that will be called when the assistant encounters an error.
+      */
+     onError?: (error: Error) => void;
+ };
+ declare function experimental_useAssistant({ api, threadId: threadIdParam, credentials, headers, body, onError, }: UseAssistantOptions): UseAssistantHelpers;
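
Finally, the assistant hook follows the same shape, but submission goes through `submitMessage` and progress is reported via `status`. A sketch assuming an `/api/assistant` route that returns an `AssistantResponse` stream and the `ai/react` import:

```tsx
import { experimental_useAssistant } from 'ai/react';

// Sketch: a minimal assistant UI driven by UseAssistantHelpers.
export function AssistantChat() {
  const { messages, input, handleInputChange, submitMessage, status, error } =
    experimental_useAssistant({ api: '/api/assistant' });

  return (
    <div>
      {messages.map((m) => (
        <p key={m.id}>
          <strong>{m.role}:</strong> {m.content}
        </p>
      ))}
      {status === 'in_progress' && <p>Assistant is responding…</p>}
      {error != null && <p>Something went wrong: {String(error)}</p>}
      <form onSubmit={submitMessage}>
        <input
          value={input}
          onChange={handleInputChange}
          disabled={status !== 'awaiting_message'}
        />
      </form>
    </div>
  );
}
```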
+
+ export { AssistantStatus, CreateMessage, Message, UseAssistantHelpers, UseAssistantOptions, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };