ai 2.1.9 → 2.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -5,8 +5,8 @@ The Vercel AI SDK is **a library for building edge-ready AI-powered streaming te
  ## Features

  - [SWR](https://swr.vercel.app)-powered React, Svelte and Vue helpers for streaming text responses and building chat and completion UIs
- - First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), and [HuggingFace](https://huggingface.co)
- - [Edge Runtime](https://edge-runtime.vercel.app/) compatibility
+ - First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), and [Hugging Face](https://huggingface.co)
+ - Node.js, Serverless, and [Edge Runtime](https://edge-runtime.vercel.app/) support
  - Callbacks for saving completed streaming responses to a database (in the same request)

  ## Installation
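For context on how the listed features fit together, here is a minimal sketch of the kind of streaming route this SDK targets, assuming the `openai-edge` client, a Next.js App Router edge route, and an `OPENAI_API_KEY` environment variable (route path and model name are illustrative):

```ts
// app/api/chat/route.ts — minimal streaming chat route (sketch)
import { Configuration, OpenAIApi } from 'openai-edge'
import { OpenAIStream, StreamingTextResponse } from 'ai'

export const runtime = 'edge'

const openai = new OpenAIApi(
  new Configuration({ apiKey: process.env.OPENAI_API_KEY })
)

export async function POST(req: Request) {
  const { messages } = await req.json()
  // Ask OpenAI for a streaming chat completion...
  const response = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages
  })
  // ...and forward it to the client as a text stream.
  return new StreamingTextResponse(OpenAIStream(response))
}
```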
package/dist/index.d.ts CHANGED
@@ -1,4 +1,6 @@
  import { ServerResponse } from 'node:http';
+ import { ChatCompletionRequestMessageFunctionCall, CreateChatCompletionRequestFunctionCall } from 'openai-edge';
+ import { ChatCompletionFunctions } from 'openai-edge/types/api';

  /**
  * Helper callback methods for AIStream stream lifecycle events
@@ -97,8 +99,15 @@ declare function LangChainStream(callbacks?: AIStreamCallbacks): {
  stream: ReadableStream<Uint8Array>;
  handlers: {
  handleLLMNewToken: (token: string) => Promise<void>;
- handleChainEnd: () => Promise<void>;
- handleLLMError: (e: any) => Promise<void>;
+ handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
+ handleLLMEnd: (_output: any, runId: string) => Promise<void>;
+ handleLLMError: (e: Error, runId: string) => Promise<void>;
+ handleChainStart: (_chain: any, _inputs: any, runId: string) => Promise<void>;
+ handleChainEnd: (_outputs: any, runId: string) => Promise<void>;
+ handleChainError: (e: Error, runId: string) => Promise<void>;
+ handleToolStart: (_tool: any, _input: string, runId: string) => Promise<void>;
+ handleToolEnd: (_output: string, runId: string) => Promise<void>;
+ handleToolError: (e: Error, runId: string) => Promise<void>;
  };
  };

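The handler surface grows from two callbacks to a full LLM/chain/tool lifecycle keyed by `runId`. A sketch of passing the `handlers` object to LangChain, assuming LangChain's `ChatOpenAI#call` of that era accepts callback handlers as its third argument (model setup and prompt are illustrative):

```ts
import { LangChainStream, StreamingTextResponse } from 'ai'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { HumanChatMessage } from 'langchain/schema'

export async function POST(req: Request) {
  const { prompt } = await req.json()
  const { stream, handlers } = LangChainStream()

  const llm = new ChatOpenAI({ streaming: true })
  // Fire and forget: the handlers feed tokens into `stream` and close it
  // once every tracked LLM/chain/tool run has ended.
  llm.call([new HumanChatMessage(prompt)], {}, [handlers]).catch(console.error)

  return new StreamingTextResponse(stream)
}
```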
@@ -109,18 +118,38 @@ type Message = {
  id: string;
  createdAt?: Date;
  content: string;
- role: 'system' | 'user' | 'assistant';
+ role: 'system' | 'user' | 'assistant' | 'function';
+ /**
+ * If the message has a role of `function`, the `name` field is the name of the function.
+ * Otherwise, the name field should not be set.
+ */
+ name?: string;
+ /**
+ * If the assistant role makes a function call, the `function_call` field
+ * contains the function call name and arguments. Otherwise, the field should
+ * not be set.
+ */
+ function_call?: string | ChatCompletionRequestMessageFunctionCall;
  };
- type CreateMessage = {
- id?: string;
- createdAt?: Date;
- content: string;
- role: 'system' | 'user' | 'assistant';
+ type CreateMessage = Omit<Message, 'id'> & {
+ id?: Message['id'];
  };
+ type ChatRequest = {
+ messages: Message[];
+ options?: RequestOptions;
+ functions?: Array<ChatCompletionFunctions>;
+ function_call?: CreateChatCompletionRequestFunctionCall;
+ };
+ type FunctionCallHandler = (chatMessages: Message[], functionCall: ChatCompletionRequestMessageFunctionCall) => Promise<ChatRequest | void>;
  type RequestOptions = {
  headers?: Record<string, string> | Headers;
  body?: object;
  };
+ type ChatRequestOptions = {
+ options?: RequestOptions;
+ functions?: Array<ChatCompletionFunctions>;
+ function_call?: CreateChatCompletionRequestFunctionCall;
+ };
  type UseChatOptions = {
  /**
  * The API endpoint that accepts a `{ messages: Message[] }` object and returns
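These types back the new client-side function-calling flow. A sketch of a `FunctionCallHandler`, assuming a hypothetical `get_current_weather` function that the model has been offered; returning a `ChatRequest` triggers a follow-up request with the function result appended, while returning nothing leaves the chat untouched:

```ts
import { nanoid } from 'ai'
import type { ChatRequest, FunctionCallHandler, Message } from 'ai'

const onFunctionCall: FunctionCallHandler = async (chatMessages, functionCall) => {
  if (functionCall.name !== 'get_current_weather') return

  // `arguments` is the JSON string reassembled from the stream.
  const args = functionCall.arguments ? JSON.parse(functionCall.arguments) : {}

  const functionMessage: Message = {
    id: nanoid(),
    role: 'function',
    name: 'get_current_weather',
    content: JSON.stringify({ location: args.location, forecast: 'sunny' })
  }

  // Re-send the extended history so the model can use the function result.
  const chatRequest: ChatRequest = { messages: [...chatMessages, functionMessage] }
  return chatRequest
}
```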
@@ -128,7 +157,7 @@ type UseChatOptions = {
  */
  api?: string;
  /**
- * An unique identifier for the chat. If not provided, a random one will be
+ * A unique identifier for the chat. If not provided, a random one will be
  * generated. When provided, the `useChat` hook with the same `id` will
  * have shared states across components.
  */
@@ -141,6 +170,12 @@ type UseChatOptions = {
  * Initial input of the chat.
  */
  initialInput?: string;
+ /**
+ * Callback function to be called when a function call is received.
+ * If the function returns a `ChatRequest` object, the request will be sent
+ * automatically to the API and will be used to update the chat.
+ */
+ experimental_onFunctionCall?: FunctionCallHandler;
  /**
  * Callback function to be called when the API response is received.
  */
@@ -153,6 +188,12 @@ type UseChatOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * The credentials mode to be used for the fetch request.
+ * Possible values are: 'omit', 'same-origin', 'include'.
+ * Defaults to 'same-origin'.
+ */
+ credentials?: RequestCredentials;
  /**
  * HTTP headers to be sent with the API request.
  */
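A sketch of the new option in use; a fragment meant to run inside a React component, assuming a cookie-authenticated API on another origin (the URL is illustrative):

```ts
import { useChat } from 'ai/react'

// Inside a React component:
const { messages } = useChat({
  api: 'https://api.example.com/chat',
  credentials: 'include' // send cookies on the cross-origin fetch
})
```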
@@ -209,6 +250,12 @@ type UseCompletionOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * The credentials mode to be used for the fetch request.
+ * Possible values are: 'omit', 'same-origin', 'include'.
+ * Defaults to 'same-origin'.
+ */
+ credentials?: RequestCredentials;
  /**
  * HTTP headers to be sent with the API request.
  */
@@ -228,4 +275,7 @@ type UseCompletionOptions = {
  body?: object;
  };

- export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, CohereStream, CreateMessage, HuggingFaceStream, LangChainStream, Message, OpenAIStream, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createEventStreamTransformer, streamToResponse, trimStartOfStreamHelper };
+ declare const nanoid: (size?: number | undefined) => string;
+ declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
+
+ export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, HuggingFaceStream, LangChainStream, Message, OpenAIStream, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, nanoid, streamToResponse, trimStartOfStreamHelper };
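Both new exports are small utilities: `nanoid` here generates 7-character alphanumeric ids (a narrower alphabet and shorter length than stock nanoid), and `createChunkDecoder` wraps a stateful `TextDecoder`. A usage sketch (the `readAll` helper is illustrative):

```ts
import { createChunkDecoder, nanoid } from 'ai'

const id = nanoid() // e.g. "x3F9kQ2" — 7 alphanumeric characters

async function readAll(res: Response): Promise<string> {
  const reader = res.body!.getReader()
  const decode = createChunkDecoder()
  let text = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    // `{ stream: true }` inside the decoder keeps multi-byte characters
    // that straddle chunk boundaries intact.
    text += decode(value)
  }
  return text
}
```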
package/dist/index.js CHANGED
@@ -65,7 +65,9 @@ __export(streams_exports, {
  OpenAIStream: () => OpenAIStream,
  StreamingTextResponse: () => StreamingTextResponse,
  createCallbacksTransformer: () => createCallbacksTransformer,
+ createChunkDecoder: () => createChunkDecoder,
  createEventStreamTransformer: () => createEventStreamTransformer,
+ nanoid: () => nanoid,
  streamToResponse: () => streamToResponse,
  trimStartOfStreamHelper: () => trimStartOfStreamHelper
  });
@@ -159,10 +161,19 @@ function createEmptyReadableStream() {
  function parseOpenAIStream() {
  const trimStartOfStream = trimStartOfStreamHelper();
  return (data) => {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  const json = JSON.parse(data);
+ if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+ return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
+ } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+ const argumentChunk = json.choices[0].delta.function_call.arguments;
+ let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+ return `${escapedPartialJson}`;
+ } else if (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call") {
+ return '"}}';
+ }
  const text = trimStartOfStream(
- (_e = (_d = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) != null ? _d : (_c = json.choices[0]) == null ? void 0 : _c.text) != null ? _e : ""
+ (_n = (_m = (_k = (_j = json.choices[0]) == null ? void 0 : _j.delta) == null ? void 0 : _k.content) != null ? _m : (_l = json.choices[0]) == null ? void 0 : _l.text) != null ? _n : ""
  );
  return text;
  };
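The three new branches re-serialize a streamed function call as one JSON string: the first name delta opens the object, each argument delta is emitted JSON-escaped, and `finish_reason: "function_call"` closes it. Concatenated on the client, the pieces parse cleanly; a sketch of the resulting shape (the function name and arguments are illustrative):

```ts
// Shape of the concatenated stream for a function call:
//   {"function_call": {"name": "<name>", "arguments": "<escaped JSON>"}}
const streamed =
  '{"function_call": {"name": "get_current_weather", "arguments": "{\\"location\\": \\"Berlin\\"}"}}'

const call = JSON.parse(streamed).function_call
console.log(call.name) // "get_current_weather"
console.log(JSON.parse(call.arguments)) // { location: "Berlin" }
```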
@@ -275,6 +286,22 @@ function AnthropicStream(res, cb) {
  function LangChainStream(callbacks) {
  const stream = new TransformStream();
  const writer = stream.writable.getWriter();
+ const runs = /* @__PURE__ */ new Set();
+ const handleError = (e, runId) => __async(this, null, function* () {
+ runs.delete(runId);
+ yield writer.ready;
+ yield writer.abort(e);
+ });
+ const handleStart = (runId) => __async(this, null, function* () {
+ runs.add(runId);
+ });
+ const handleEnd = (runId) => __async(this, null, function* () {
+ runs.delete(runId);
+ if (runs.size === 0) {
+ yield writer.ready;
+ yield writer.close();
+ }
+ });
  return {
  stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
  handlers: {
@@ -282,17 +309,51 @@ function LangChainStream(callbacks) {
  yield writer.ready;
  yield writer.write(token);
  }),
- handleChainEnd: () => __async(this, null, function* () {
- yield writer.ready;
- yield writer.close();
+ handleLLMStart: (_llm, _prompts, runId) => __async(this, null, function* () {
+ handleStart(runId);
  }),
- handleLLMError: (e) => __async(this, null, function* () {
- yield writer.ready;
- yield writer.abort(e);
+ handleLLMEnd: (_output, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleLLMError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
+ }),
+ handleChainStart: (_chain, _inputs, runId) => __async(this, null, function* () {
+ handleStart(runId);
+ }),
+ handleChainEnd: (_outputs, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleChainError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
+ }),
+ handleToolStart: (_tool, _input, runId) => __async(this, null, function* () {
+ handleStart(runId);
+ }),
+ handleToolEnd: (_output, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleToolError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
  })
  }
  };
  }
+
+ // shared/utils.ts
+ var import_nanoid = require("nanoid");
+ var nanoid = (0, import_nanoid.customAlphabet)(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder() {
+ const decoder = new TextDecoder();
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AIStream,
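The old implementation closed the writer on the first `handleChainEnd`, which could truncate output for agents and multi-step chains. The rewrite counts runs instead: every LLM, chain, or tool start adds its `runId` to the set, every end removes it, and the stream closes only when the set is empty. A sketch of a chain call that exercises this, assuming LangChain's `LLMChain#call` of that era accepts callbacks as its second argument:

```ts
import { LangChainStream } from 'ai'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { LLMChain } from 'langchain/chains'
import { PromptTemplate } from 'langchain/prompts'

const { stream, handlers } = LangChainStream()

const chain = new LLMChain({
  llm: new ChatOpenAI({ streaming: true }),
  prompt: PromptTemplate.fromTemplate('Tell me a joke about {topic}')
})

// Two runs are tracked here (the chain and its inner LLM); the readable
// side of `stream` ends only after both have reported completion.
chain.call({ topic: 'streams' }, [handlers]).catch(console.error)
```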
@@ -303,7 +364,9 @@ function LangChainStream(callbacks) {
  OpenAIStream,
  StreamingTextResponse,
  createCallbacksTransformer,
+ createChunkDecoder,
  createEventStreamTransformer,
+ nanoid,
  streamToResponse,
  trimStartOfStreamHelper
  });
package/dist/index.mjs CHANGED
@@ -128,10 +128,19 @@ function createEmptyReadableStream() {
  function parseOpenAIStream() {
  const trimStartOfStream = trimStartOfStreamHelper();
  return (data) => {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  const json = JSON.parse(data);
+ if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+ return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
+ } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+ const argumentChunk = json.choices[0].delta.function_call.arguments;
+ let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+ return `${escapedPartialJson}`;
+ } else if (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call") {
+ return '"}}';
+ }
  const text = trimStartOfStream(
- (_e = (_d = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) != null ? _d : (_c = json.choices[0]) == null ? void 0 : _c.text) != null ? _e : ""
+ (_n = (_m = (_k = (_j = json.choices[0]) == null ? void 0 : _j.delta) == null ? void 0 : _k.content) != null ? _m : (_l = json.choices[0]) == null ? void 0 : _l.text) != null ? _n : ""
  );
  return text;
  };
@@ -244,6 +253,22 @@ function AnthropicStream(res, cb) {
  function LangChainStream(callbacks) {
  const stream = new TransformStream();
  const writer = stream.writable.getWriter();
+ const runs = /* @__PURE__ */ new Set();
+ const handleError = (e, runId) => __async(this, null, function* () {
+ runs.delete(runId);
+ yield writer.ready;
+ yield writer.abort(e);
+ });
+ const handleStart = (runId) => __async(this, null, function* () {
+ runs.add(runId);
+ });
+ const handleEnd = (runId) => __async(this, null, function* () {
+ runs.delete(runId);
+ if (runs.size === 0) {
+ yield writer.ready;
+ yield writer.close();
+ }
+ });
  return {
  stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
  handlers: {
@@ -251,17 +276,51 @@ function LangChainStream(callbacks) {
  yield writer.ready;
  yield writer.write(token);
  }),
- handleChainEnd: () => __async(this, null, function* () {
- yield writer.ready;
- yield writer.close();
+ handleLLMStart: (_llm, _prompts, runId) => __async(this, null, function* () {
+ handleStart(runId);
  }),
- handleLLMError: (e) => __async(this, null, function* () {
- yield writer.ready;
- yield writer.abort(e);
+ handleLLMEnd: (_output, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleLLMError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
+ }),
+ handleChainStart: (_chain, _inputs, runId) => __async(this, null, function* () {
+ handleStart(runId);
+ }),
+ handleChainEnd: (_outputs, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleChainError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
+ }),
+ handleToolStart: (_tool, _input, runId) => __async(this, null, function* () {
+ handleStart(runId);
+ }),
+ handleToolEnd: (_output, runId) => __async(this, null, function* () {
+ yield handleEnd(runId);
+ }),
+ handleToolError: (e, runId) => __async(this, null, function* () {
+ yield handleError(e, runId);
  })
  }
  };
  }
+
+ // shared/utils.ts
+ import { customAlphabet } from "nanoid";
+ var nanoid = customAlphabet(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder() {
+ const decoder = new TextDecoder();
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
+ }
  export {
  AIStream,
  AnthropicStream,
@@ -271,7 +330,9 @@ export {
  OpenAIStream,
  StreamingTextResponse,
  createCallbacksTransformer,
+ createChunkDecoder,
  createEventStreamTransformer,
+ nanoid,
  streamToResponse,
  trimStartOfStreamHelper
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.1.9",
+ "version": "2.1.11",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -45,9 +45,9 @@
  },
  "dependencies": {
  "eventsource-parser": "1.0.0",
+ "swr": "2.1.5",
  "nanoid": "^3.3.6",
  "sswr": "^1.10.0",
- "swr": "2.1.5",
  "swrv": "1.0.3"
  },
  "devDependencies": {
@@ -58,6 +58,7 @@
  "@types/react-dom": "^18.2.0",
  "eslint": "^7.32.0",
  "jest": "29.2.1",
+ "openai-edge": "^1.1.0",
  "ts-jest": "29.0.3",
  "tsup": "^6.7.0",
  "typescript": "5.1.3",
@@ -66,7 +67,7 @@
  },
  "peerDependencies": {
  "react": "^18.2.0",
- "svelte": "^3.29.0",
+ "svelte": "^4.0.0",
  "vue": "^3.3.4"
  },
  "peerDependenciesMeta": {
@@ -1,3 +1,5 @@
+ import { ChatCompletionRequestMessageFunctionCall, CreateChatCompletionRequestFunctionCall } from 'openai-edge';
+ import { ChatCompletionFunctions } from 'openai-edge/types/api';
  import * as react_jsx_runtime from 'react/jsx-runtime';

  /**
@@ -7,18 +9,38 @@ type Message = {
  id: string;
  createdAt?: Date;
  content: string;
- role: 'system' | 'user' | 'assistant';
+ role: 'system' | 'user' | 'assistant' | 'function';
+ /**
+ * If the message has a role of `function`, the `name` field is the name of the function.
+ * Otherwise, the name field should not be set.
+ */
+ name?: string;
+ /**
+ * If the assistant role makes a function call, the `function_call` field
+ * contains the function call name and arguments. Otherwise, the field should
+ * not be set.
+ */
+ function_call?: string | ChatCompletionRequestMessageFunctionCall;
  };
- type CreateMessage = {
- id?: string;
- createdAt?: Date;
- content: string;
- role: 'system' | 'user' | 'assistant';
+ type CreateMessage = Omit<Message, 'id'> & {
+ id?: Message['id'];
+ };
+ type ChatRequest = {
+ messages: Message[];
+ options?: RequestOptions;
+ functions?: Array<ChatCompletionFunctions>;
+ function_call?: CreateChatCompletionRequestFunctionCall;
  };
+ type FunctionCallHandler = (chatMessages: Message[], functionCall: ChatCompletionRequestMessageFunctionCall) => Promise<ChatRequest | void>;
  type RequestOptions = {
  headers?: Record<string, string> | Headers;
  body?: object;
  };
+ type ChatRequestOptions = {
+ options?: RequestOptions;
+ functions?: Array<ChatCompletionFunctions>;
+ function_call?: CreateChatCompletionRequestFunctionCall;
+ };
  type UseChatOptions = {
  /**
  * The API endpoint that accepts a `{ messages: Message[] }` object and returns
@@ -26,7 +48,7 @@ type UseChatOptions = {
  */
  api?: string;
  /**
- * An unique identifier for the chat. If not provided, a random one will be
+ * A unique identifier for the chat. If not provided, a random one will be
  * generated. When provided, the `useChat` hook with the same `id` will
  * have shared states across components.
  */
@@ -39,6 +61,12 @@ type UseChatOptions = {
  * Initial input of the chat.
  */
  initialInput?: string;
+ /**
+ * Callback function to be called when a function call is received.
+ * If the function returns a `ChatRequest` object, the request will be sent
+ * automatically to the API and will be used to update the chat.
+ */
+ experimental_onFunctionCall?: FunctionCallHandler;
  /**
  * Callback function to be called when the API response is received.
  */
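A sketch of a client component wiring the experimental option into `useChat` from `ai/react`; the logging body is illustrative, and a real handler would typically return a `ChatRequest` as sketched earlier:

```tsx
'use client'
import { useChat } from 'ai/react'

export default function Chat() {
  const { messages, input, handleInputChange, handleSubmit } = useChat({
    experimental_onFunctionCall: async (chatMessages, functionCall) => {
      // Inspect the call; return a ChatRequest to answer it, or void to ignore.
      console.log('model requested function:', functionCall.name)
    }
  })

  return (
    <form onSubmit={handleSubmit}>
      {messages.map(m => (
        <p key={m.id}>
          {m.role}: {m.content}
        </p>
      ))}
      <input value={input} onChange={handleInputChange} />
    </form>
  )
}
```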
@@ -51,6 +79,12 @@ type UseChatOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * The credentials mode to be used for the fetch request.
+ * Possible values are: 'omit', 'same-origin', 'include'.
+ * Defaults to 'same-origin'.
+ */
+ credentials?: RequestCredentials;
  /**
  * HTTP headers to be sent with the API request.
  */
@@ -107,6 +141,12 @@ type UseCompletionOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * The credentials mode to be used for the fetch request.
+ * Possible values are: 'omit', 'same-origin', 'include'.
+ * Defaults to 'same-origin'.
+ */
+ credentials?: RequestCredentials;
  /**
  * HTTP headers to be sent with the API request.
  */
@@ -137,13 +177,13 @@ type UseChatHelpers = {
  * @param message The message to append
  * @param options Additional options to pass to the API call
  */
- append: (message: Message | CreateMessage, options?: RequestOptions) => Promise<string | null | undefined>;
+ append: (message: Message | CreateMessage, chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
  /**
  * Reload the last AI chat response for the given chat history. If the last
  * message isn't from the assistant, it will request the API to generate a
  * new response.
  */
- reload: (options?: RequestOptions) => Promise<string | null | undefined>;
+ reload: (chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
  /**
  * Abort the current request immediately, keep the generated tokens if any.
  */
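With the widened signature, per-call function definitions can ride along with `append` (and `reload`). A fragment assuming it runs where `append` from `useChat` is in scope; the function schema is illustrative:

```ts
await append(
  { role: 'user', content: 'What is the weather in Berlin?' },
  {
    functions: [
      {
        name: 'get_current_weather',
        description: 'Get the current weather for a location',
        parameters: {
          type: 'object',
          properties: { location: { type: 'string' } },
          required: ['location']
        }
      }
    ]
  }
)
```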
@@ -161,11 +201,12 @@ type UseChatHelpers = {
  /** An input/textarea-ready onChange handler to control the value of the input */
  handleInputChange: (e: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
  /** Form submission handler to automattically reset input and append a user message */
- handleSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
+ handleSubmit: (e: React.FormEvent<HTMLFormElement>, chatRequestOptions?: ChatRequestOptions) => void;
+ metadata?: Object;
  /** Whether the API request is in progress */
  isLoading: boolean;
  };
- declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, headers, body }?: UseChatOptions): UseChatHelpers;
+ declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;

  type UseCompletionHelpers = {
  /** The current completion result */
@@ -209,7 +250,7 @@ type UseCompletionHelpers = {
  /** Whether the API request is in progress */
  isLoading: boolean;
  };
- declare function useCompletion({ api, id, initialCompletion, initialInput, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+ declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;

  type Props = {
  /**