ai 2.1.9 → 2.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +51 -8
- package/dist/index.js +30 -2
- package/dist/index.mjs +28 -2
- package/package.json +6 -5
- package/react/dist/index.d.ts +53 -12
- package/react/dist/index.js +134 -82
- package/react/dist/index.mjs +135 -83
- package/svelte/dist/index.d.ts +44 -9
- package/svelte/dist/index.js +7 -3
- package/svelte/dist/index.mjs +7 -3
- package/vue/dist/index.d.ts +44 -9
- package/vue/dist/index.js +6 -2
- package/vue/dist/index.mjs +6 -2
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,6 @@
 import { ServerResponse } from 'node:http';
+import { ChatCompletionRequestMessageFunctionCall, CreateChatCompletionRequestFunctionCall } from 'openai-edge';
+import { ChatCompletionFunctions } from 'openai-edge/types/api';
 
 /**
  * Helper callback methods for AIStream stream lifecycle events
@@ -109,18 +111,38 @@ type Message = {
     id: string;
     createdAt?: Date;
     content: string;
-    role: 'system' | 'user' | 'assistant';
+    role: 'system' | 'user' | 'assistant' | 'function';
+    /**
+     * If the message has a role of `function`, the `name` field is the name of the function.
+     * Otherwise, the name field should not be set.
+     */
+    name?: string;
+    /**
+     * If the assistant role makes a function call, the `function_call` field
+     * contains the function call name and arguments. Otherwise, the field should
+     * not be set.
+     */
+    function_call?: string | ChatCompletionRequestMessageFunctionCall;
 };
-type CreateMessage = {
-    id?: string;
-    createdAt?: Date;
-    content: string;
-    role: 'system' | 'user' | 'assistant';
+type CreateMessage = Omit<Message, 'id'> & {
+    id?: Message['id'];
 };
+type ChatRequest = {
+    messages: Message[];
+    options?: RequestOptions;
+    functions?: Array<ChatCompletionFunctions>;
+    function_call?: CreateChatCompletionRequestFunctionCall;
+};
+type FunctionCallHandler = (chatMessages: Message[], functionCall: ChatCompletionRequestMessageFunctionCall) => Promise<ChatRequest | void>;
 type RequestOptions = {
     headers?: Record<string, string> | Headers;
     body?: object;
 };
+type ChatRequestOptions = {
+    options?: RequestOptions;
+    functions?: Array<ChatCompletionFunctions>;
+    function_call?: CreateChatCompletionRequestFunctionCall;
+};
 type UseChatOptions = {
     /**
      * The API endpoint that accepts a `{ messages: Message[] }` object and returns
@@ -128,7 +150,7 @@ type UseChatOptions = {
      */
     api?: string;
     /**
-     *
+     * A unique identifier for the chat. If not provided, a random one will be
      * generated. When provided, the `useChat` hook with the same `id` will
      * have shared states across components.
      */
@@ -141,6 +163,12 @@ type UseChatOptions = {
      * Initial input of the chat.
      */
     initialInput?: string;
+    /**
+     * Callback function to be called when a function call is received.
+     * If the function returns a `ChatRequest` object, the request will be sent
+     * automatically to the API and will be used to update the chat.
+     */
+    experimental_onFunctionCall?: FunctionCallHandler;
     /**
      * Callback function to be called when the API response is received.
      */
@@ -153,6 +181,12 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * The credentials mode to be used for the fetch request.
+     * Possible values are: 'omit', 'same-origin', 'include'.
+     * Defaults to 'same-origin'.
+     */
+    credentials?: RequestCredentials;
     /**
      * HTTP headers to be sent with the API request.
      */
@@ -209,6 +243,12 @@ type UseCompletionOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * The credentials mode to be used for the fetch request.
+     * Possible values are: 'omit', 'same-origin', 'include'.
+     * Defaults to 'same-origin'.
+     */
+    credentials?: RequestCredentials;
     /**
      * HTTP headers to be sent with the API request.
      */
@@ -228,4 +268,7 @@ type UseCompletionOptions = {
     body?: object;
 };
 
-
+declare const nanoid: (size?: number | undefined) => string;
+declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
+
+export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, HuggingFaceStream, LangChainStream, Message, OpenAIStream, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, nanoid, streamToResponse, trimStartOfStreamHelper };
package/dist/index.js
CHANGED
@@ -65,7 +65,9 @@ __export(streams_exports, {
   OpenAIStream: () => OpenAIStream,
   StreamingTextResponse: () => StreamingTextResponse,
   createCallbacksTransformer: () => createCallbacksTransformer,
+  createChunkDecoder: () => createChunkDecoder,
   createEventStreamTransformer: () => createEventStreamTransformer,
+  nanoid: () => nanoid,
   streamToResponse: () => streamToResponse,
   trimStartOfStreamHelper: () => trimStartOfStreamHelper
 });
@@ -159,10 +161,19 @@ function createEmptyReadableStream() {
 function parseOpenAIStream() {
   const trimStartOfStream = trimStartOfStreamHelper();
   return (data) => {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
     const json = JSON.parse(data);
+    if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+      return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
+    } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+      const argumentChunk = json.choices[0].delta.function_call.arguments;
+      let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+      return `${escapedPartialJson}`;
+    } else if (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call") {
+      return '"}}';
+    }
     const text = trimStartOfStream(
-      (
+      (_n = (_m = (_k = (_j = json.choices[0]) == null ? void 0 : _j.delta) == null ? void 0 : _k.content) != null ? _m : (_l = json.choices[0]) == null ? void 0 : _l.text) != null ? _n : ""
     );
     return text;
   };
@@ -293,6 +304,21 @@ function LangChainStream(callbacks) {
     }
   };
 }
+
+// shared/utils.ts
+var import_nanoid = require("nanoid");
+var nanoid = (0, import_nanoid.customAlphabet)(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder() {
+  const decoder = new TextDecoder();
+  return function(chunk) {
+    if (!chunk)
+      return "";
+    return decoder.decode(chunk, { stream: true });
+  };
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AIStream,
@@ -303,7 +329,9 @@ function LangChainStream(callbacks) {
   OpenAIStream,
   StreamingTextResponse,
   createCallbacksTransformer,
+  createChunkDecoder,
   createEventStreamTransformer,
+  nanoid,
   streamToResponse,
   trimStartOfStreamHelper
 });
package/dist/index.mjs
CHANGED
@@ -128,10 +128,19 @@ function createEmptyReadableStream() {
 function parseOpenAIStream() {
   const trimStartOfStream = trimStartOfStreamHelper();
   return (data) => {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
     const json = JSON.parse(data);
+    if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+      return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
+    } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+      const argumentChunk = json.choices[0].delta.function_call.arguments;
+      let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+      return `${escapedPartialJson}`;
+    } else if (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call") {
+      return '"}}';
+    }
     const text = trimStartOfStream(
-      (
+      (_n = (_m = (_k = (_j = json.choices[0]) == null ? void 0 : _j.delta) == null ? void 0 : _k.content) != null ? _m : (_l = json.choices[0]) == null ? void 0 : _l.text) != null ? _n : ""
     );
     return text;
   };
@@ -262,6 +271,21 @@ function LangChainStream(callbacks) {
     }
   };
 }
+
+// shared/utils.ts
+import { customAlphabet } from "nanoid";
+var nanoid = customAlphabet(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder() {
+  const decoder = new TextDecoder();
+  return function(chunk) {
+    if (!chunk)
+      return "";
+    return decoder.decode(chunk, { stream: true });
+  };
+}
 export {
   AIStream,
   AnthropicStream,
@@ -271,7 +295,9 @@ export {
   OpenAIStream,
   StreamingTextResponse,
   createCallbacksTransformer,
+  createChunkDecoder,
   createEventStreamTransformer,
+  nanoid,
   streamToResponse,
   trimStartOfStreamHelper
 };
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.1.9",
+  "version": "2.1.10",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -45,9 +45,9 @@
   },
   "dependencies": {
     "eventsource-parser": "1.0.0",
+    "swr": "2.1.5",
     "nanoid": "^3.3.6",
     "sswr": "^1.10.0",
-    "swr": "2.1.5",
     "swrv": "1.0.3"
   },
   "devDependencies": {
@@ -58,15 +58,16 @@
     "@types/react-dom": "^18.2.0",
     "eslint": "^7.32.0",
     "jest": "29.2.1",
+    "openai-edge": "^1.1.0",
     "ts-jest": "29.0.3",
     "tsup": "^6.7.0",
     "typescript": "5.1.3",
-    "
-    "
+    "eslint-config-vercel-ai": "0.0.0",
+    "@vercel/ai-tsconfig": "0.0.0"
   },
   "peerDependencies": {
     "react": "^18.2.0",
-    "svelte": "^
+    "svelte": "^4.0.0",
     "vue": "^3.3.4"
   },
   "peerDependenciesMeta": {
package/react/dist/index.d.ts
CHANGED
@@ -1,3 +1,5 @@
+import { ChatCompletionRequestMessageFunctionCall, CreateChatCompletionRequestFunctionCall } from 'openai-edge';
+import { ChatCompletionFunctions } from 'openai-edge/types/api';
 import * as react_jsx_runtime from 'react/jsx-runtime';
 
 /**
@@ -7,18 +9,38 @@ type Message = {
     id: string;
     createdAt?: Date;
     content: string;
-    role: 'system' | 'user' | 'assistant';
+    role: 'system' | 'user' | 'assistant' | 'function';
+    /**
+     * If the message has a role of `function`, the `name` field is the name of the function.
+     * Otherwise, the name field should not be set.
+     */
+    name?: string;
+    /**
+     * If the assistant role makes a function call, the `function_call` field
+     * contains the function call name and arguments. Otherwise, the field should
+     * not be set.
+     */
+    function_call?: string | ChatCompletionRequestMessageFunctionCall;
 };
-type CreateMessage = {
-    id?: string;
-    createdAt?: Date;
-    content: string;
-    role: 'system' | 'user' | 'assistant';
+type CreateMessage = Omit<Message, 'id'> & {
+    id?: Message['id'];
+};
+type ChatRequest = {
+    messages: Message[];
+    options?: RequestOptions;
+    functions?: Array<ChatCompletionFunctions>;
+    function_call?: CreateChatCompletionRequestFunctionCall;
 };
+type FunctionCallHandler = (chatMessages: Message[], functionCall: ChatCompletionRequestMessageFunctionCall) => Promise<ChatRequest | void>;
 type RequestOptions = {
     headers?: Record<string, string> | Headers;
     body?: object;
 };
+type ChatRequestOptions = {
+    options?: RequestOptions;
+    functions?: Array<ChatCompletionFunctions>;
+    function_call?: CreateChatCompletionRequestFunctionCall;
+};
 type UseChatOptions = {
     /**
      * The API endpoint that accepts a `{ messages: Message[] }` object and returns
@@ -26,7 +48,7 @@ type UseChatOptions = {
      */
     api?: string;
     /**
-     *
+     * A unique identifier for the chat. If not provided, a random one will be
      * generated. When provided, the `useChat` hook with the same `id` will
      * have shared states across components.
      */
@@ -39,6 +61,12 @@ type UseChatOptions = {
      * Initial input of the chat.
      */
     initialInput?: string;
+    /**
+     * Callback function to be called when a function call is received.
+     * If the function returns a `ChatRequest` object, the request will be sent
+     * automatically to the API and will be used to update the chat.
+     */
+    experimental_onFunctionCall?: FunctionCallHandler;
     /**
      * Callback function to be called when the API response is received.
     */
@@ -51,6 +79,12 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * The credentials mode to be used for the fetch request.
+     * Possible values are: 'omit', 'same-origin', 'include'.
+     * Defaults to 'same-origin'.
+     */
+    credentials?: RequestCredentials;
     /**
      * HTTP headers to be sent with the API request.
      */
@@ -107,6 +141,12 @@ type UseCompletionOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * The credentials mode to be used for the fetch request.
+     * Possible values are: 'omit', 'same-origin', 'include'.
+     * Defaults to 'same-origin'.
+     */
+    credentials?: RequestCredentials;
     /**
      * HTTP headers to be sent with the API request.
      */
@@ -137,13 +177,13 @@ type UseChatHelpers = {
      * @param message The message to append
      * @param options Additional options to pass to the API call
      */
-    append: (message: Message | CreateMessage,
+    append: (message: Message | CreateMessage, chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
     /**
      * Reload the last AI chat response for the given chat history. If the last
      * message isn't from the assistant, it will request the API to generate a
      * new response.
     */
-    reload: (
+    reload: (chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
    /**
     * Abort the current request immediately, keep the generated tokens if any.
     */
@@ -161,11 +201,12 @@ type UseChatHelpers = {
    /** An input/textarea-ready onChange handler to control the value of the input */
    handleInputChange: (e: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
    /** Form submission handler to automattically reset input and append a user message */
-    handleSubmit: (e: React.FormEvent<HTMLFormElement
+    handleSubmit: (e: React.FormEvent<HTMLFormElement>, chatRequestOptions?: ChatRequestOptions) => void;
+    metadata?: Object;
    /** Whether the API request is in progress */
    isLoading: boolean;
 };
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, headers, body }?: UseChatOptions): UseChatHelpers;
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;
 
 type UseCompletionHelpers = {
    /** The current completion result */
@@ -209,7 +250,7 @@ type UseCompletionHelpers = {
    /** Whether the API request is in progress */
    isLoading: boolean;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
 
 type Props = {
     /**