ai 2.2.9 → 2.2.11

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registry.
package/README.md CHANGED
@@ -23,35 +23,35 @@ With the Vercel AI SDK, you can build a ChatGPT-like app in just a few lines of
 
 ```tsx
 // ./app/api/chat/route.js
-import OpenAI from 'openai'
-import { OpenAIStream, StreamingTextResponse } from 'ai'
+import OpenAI from 'openai';
+import { OpenAIStream, StreamingTextResponse } from 'ai';
 
 const openai = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY
-})
+  apiKey: process.env.OPENAI_API_KEY,
+});
 
-export const runtime = 'edge'
+export const runtime = 'edge';
 
 export async function POST(req) {
-  const { messages } = await req.json()
+  const { messages } = await req.json();
   const response = await openai.chat.completions.create({
     model: 'gpt-4',
     stream: true,
-    messages
-  })
-  const stream = OpenAIStream(response)
-  return new StreamingTextResponse(stream)
+    messages,
+  });
+  const stream = OpenAIStream(response);
+  return new StreamingTextResponse(stream);
 }
 ```
 
 ```tsx
 // ./app/page.js
-'use client'
+'use client';
 
-import { useChat } from 'ai/react'
+import { useChat } from 'ai/react';
 
 export default function Chat() {
-  const { messages, input, handleInputChange, handleSubmit } = useChat()
+  const { messages, input, handleInputChange, handleSubmit } = useChat();
 
   return (
     <div>
@@ -69,7 +69,7 @@ export default function Chat() {
         />
       </form>
     </div>
-  )
+  );
 }
 ```
 
package/dist/index.d.ts CHANGED
@@ -449,6 +449,7 @@ declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk>,
 
 declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
     stream: ReadableStream<any>;
+    writer: WritableStreamDefaultWriter<any>;
     handlers: {
         handleLLMNewToken: (token: string) => Promise<void>;
        handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
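2.2.11 adds the underlying stream's `writer` to the object returned by `LangChainStream`, so callers are no longer limited to the prebuilt `handlers`. A minimal sketch of how the exposed writer might be used in a route handler — the LangChain imports and the abort-on-error choice are assumptions for illustration, not code from this package:

```ts
// Hypothetical Next.js route handler; assumes langchain is installed.
import { LangChainStream, StreamingTextResponse } from 'ai';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { HumanMessage } from 'langchain/schema';

export async function POST(req: Request) {
  const { prompt } = await req.json();
  const { stream, writer, handlers } = LangChainStream();

  const llm = new ChatOpenAI({ streaming: true });
  // Tokens flow through `handlers` into `stream`. With the writer now
  // exposed, a failed generation can tear the response stream down
  // immediately instead of leaving the client hanging.
  llm.call([new HumanMessage(prompt)], {}, [handlers]).catch(async (e) => {
    await writer.ready;
    await writer.abort(e);
  });

  return new StreamingTextResponse(stream);
}
```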
package/dist/index.js CHANGED
@@ -333,7 +333,9 @@ function chunkToText() {
     return text;
   };
 }
-var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+var __internal__OpenAIFnMessagesSymbol = Symbol(
+  "internal_openai_fn_messages"
+);
 function isChatCompletionChunk(data) {
   return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
 }
@@ -655,6 +657,7 @@ function LangChainStream(callbacks) {
     stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
       createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
     ),
+    writer,
     handlers: {
       handleLLMNewToken: async (token) => {
         await writer.ready;
package/dist/index.mjs CHANGED
@@ -288,7 +288,9 @@ function chunkToText() {
     return text;
   };
 }
-var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+var __internal__OpenAIFnMessagesSymbol = Symbol(
+  "internal_openai_fn_messages"
+);
 function isChatCompletionChunk(data) {
   return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
 }
@@ -610,6 +612,7 @@ function LangChainStream(callbacks) {
     stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
       createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
     ),
+    writer,
     handlers: {
       handleLLMNewToken: async (token) => {
         await writer.ready;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.2.9",
+  "version": "2.2.11",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
package/react/dist/index.d.ts CHANGED
@@ -206,7 +206,7 @@ type UseChatHelpers = {
     /** Additional data added on the server via StreamData */
     data?: any;
 };
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;
 
 type UseCompletionHelpers = {
     /** The current completion result */
@@ -250,7 +250,7 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: boolean;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
 import * as react_jsx_runtime from 'react/jsx-runtime';
package/react/dist/index.js CHANGED
@@ -146,13 +146,31 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
   let responseMessages = [];
   let responseData = [];
   const prefixMap = {};
+  const NEWLINE = "\n".charCodeAt(0);
+  let chunks = [];
+  let totalLength = 0;
   if (isComplexMode) {
     while (true) {
-      const { done, value } = await reader.read();
-      if (done) {
+      const { value } = await reader.read();
+      if (value) {
+        chunks.push(value);
+        totalLength += value.length;
+        if (value[value.length - 1] !== NEWLINE) {
+          continue;
+        }
+      }
+      if (chunks.length === 0) {
         break;
       }
-      const lines = decode(value);
+      let concatenatedChunks = new Uint8Array(totalLength);
+      let offset = 0;
+      for (const chunk of chunks) {
+        concatenatedChunks.set(chunk, offset);
+        offset += chunk.length;
+      }
+      chunks.length = 0;
+      totalLength = 0;
+      const lines = decode(concatenatedChunks);
      if (typeof lines === "string") {
        throw new Error(
          "Invalid response format. Complex mode was set but the response is a string. This should never happen."
package/react/dist/index.mjs CHANGED
@@ -110,13 +110,31 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
   let responseMessages = [];
   let responseData = [];
   const prefixMap = {};
+  const NEWLINE = "\n".charCodeAt(0);
+  let chunks = [];
+  let totalLength = 0;
   if (isComplexMode) {
     while (true) {
-      const { done, value } = await reader.read();
-      if (done) {
+      const { value } = await reader.read();
+      if (value) {
+        chunks.push(value);
+        totalLength += value.length;
+        if (value[value.length - 1] !== NEWLINE) {
+          continue;
+        }
+      }
+      if (chunks.length === 0) {
         break;
       }
-      const lines = decode(value);
+      let concatenatedChunks = new Uint8Array(totalLength);
+      let offset = 0;
+      for (const chunk of chunks) {
+        concatenatedChunks.set(chunk, offset);
+        offset += chunk.length;
+      }
+      chunks.length = 0;
+      totalLength = 0;
+      const lines = decode(concatenatedChunks);
      if (typeof lines === "string") {
        throw new Error(
          "Invalid response format. Complex mode was set but the response is a string. This should never happen."
package/solid/dist/index.d.ts CHANGED
@@ -197,7 +197,7 @@ type UseChatHelpers = {
     /** Whether the API request is in progress */
     isLoading: Accessor<boolean>;
 };
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;
 
 type UseCompletionHelpers = {
     /** The current completion result */
@@ -233,6 +233,6 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: Accessor<boolean>;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
package/svelte/dist/index.d.ts CHANGED
@@ -201,7 +201,7 @@ type UseChatHelpers = {
     /** Whether the API request is in progress */
     isLoading: Readable<boolean | undefined>;
 };
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;
 
 type UseCompletionHelpers = {
     /** The current completion result */
@@ -235,6 +235,6 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: Readable<boolean | undefined>;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
package/vue/dist/index.d.ts CHANGED
@@ -193,7 +193,7 @@ type UseChatHelpers = {
     /** Whether the API request is in progress */
     isLoading: Ref<boolean | undefined>;
 };
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, credentials, headers, body }?: UseChatOptions): UseChatHelpers;
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;
 
 type UseCompletionHelpers = {
     /** The current completion result */
@@ -227,6 +227,6 @@ type UseCompletionHelpers = {
     /** Whether the API request is in progress */
     isLoading: Ref<boolean | undefined>;
 };
-declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError }?: UseCompletionOptions): UseCompletionHelpers;
+declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
package/vue/dist/index.js CHANGED
@@ -280,6 +280,7 @@ function useCompletion({
   onFinish,
   onError
 } = {}) {
+  var _a;
   const completionId = id || `completion-${uniqueId2++}`;
   const key = `${api}|${completionId}`;
   const { data, mutate: originalMutate } = useSWRV2(
@@ -287,8 +288,10 @@ function useCompletion({
     () => store2[key] || initialCompletion
   );
   const { data: isLoading, mutate: mutateLoading } = useSWRV2(
-    `${completionId}-loading`
+    `${completionId}-loading`,
+    null
   );
+  (_a = isLoading.value) != null ? _a : isLoading.value = false;
   data.value || (data.value = initialCompletion);
   const mutate = (data2) => {
     store2[key] = data2;
package/vue/dist/index.mjs CHANGED
@@ -243,6 +243,7 @@ function useCompletion({
   onFinish,
   onError
 } = {}) {
+  var _a;
   const completionId = id || `completion-${uniqueId2++}`;
   const key = `${api}|${completionId}`;
   const { data, mutate: originalMutate } = useSWRV2(
@@ -250,8 +251,10 @@ function useCompletion({
     () => store2[key] || initialCompletion
   );
   const { data: isLoading, mutate: mutateLoading } = useSWRV2(
-    `${completionId}-loading`
+    `${completionId}-loading`,
+    null
  );
+  (_a = isLoading.value) != null ? _a : isLoading.value = false;
  data.value || (data.value = initialCompletion);
  const mutate = (data2) => {
    store2[key] = data2;
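Both Vue builds get the same two-part fix for `useCompletion`: `useSWRV2` (the bundler's alias for swrv's `useSWRV`) is now called with an explicit `null` fetcher so the loading key is used purely as shared state, and `isLoading` is given a defined initial value. The compiled `(_a = isLoading.value) != null ? _a : isLoading.value = false;` is the downleveled form of a nullish assignment; in source terms the change is roughly the following sketch, reconstructed from the compiled output rather than taken from the package's actual source:

```ts
const { data: isLoading, mutate: mutateLoading } = useSWRV(
  `${completionId}-loading`,
  null, // explicit null fetcher: nothing to fetch for this key
);
isLoading.value ??= false; // default to "not loading" instead of undefined
```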