ai 2.1.32 → 2.1.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/index.d.ts +26 -1
- package/dist/index.js +26 -4
- package/dist/index.mjs +26 -4
- package/package.json +5 -5
- package/vue/dist/index.js +4 -1
- package/vue/dist/index.mjs +4 -1
package/README.md
CHANGED
@@ -76,7 +76,7 @@ export default function Chat() {
 
 ---
 
-View the full documentation and examples on [sdk.vercel.ai/docs](https://sdk.vercel.ai/docs)
+View the full documentation and examples on [sdk.vercel.ai/docs](https://sdk.vercel.ai/docs).
 
 ## Authors
 
@@ -88,4 +88,4 @@ This library is created by [Vercel](https://vercel.com) and [Next.js](https://nextjs.org)
 - Malte Ubl ([@cramforce](https://twitter.com/cramforce)) - [Vercel](https://vercel.com)
 - Justin Ridgewell ([@jridgewell](https://github.com/jridgewell)) - [Vercel](https://vercel.com)
 
-[Contributors](https://github.com/vercel
+[Contributors](https://github.com/vercel/ai/graphs/contributors)
package/dist/index.d.ts
CHANGED
@@ -348,7 +348,32 @@ declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacks): ReadableStream;
 
 declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacks): ReadableStream;
 
-declare function AnthropicStream(res: Response, cb?: AIStreamCallbacks): ReadableStream;
+interface CompletionChunk {
+    /**
+     * The resulting completion up to and excluding the stop sequences.
+     */
+    completion: string;
+    /**
+     * The model that performed the completion.
+     */
+    model: string;
+    /**
+     * The reason that we stopped sampling.
+     *
+     * This may be one the following values:
+     *
+     * - `"stop_sequence"`: we reached a stop sequence — either provided by you via the
+     *   `stop_sequences` parameter, or a stop sequence built into the model
+     * - `"max_tokens"`: we exceeded `max_tokens_to_sample` or the model's maximum
+     */
+    stop_reason: string;
+}
+/**
+ * Accepts either a fetch Response from the Anthropic `POST /v1/complete` endpoint,
+ * or the return value of `await client.completions.create({ stream: true })`
+ * from the `@anthropic-ai/sdk` package.
+ */
+declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk>, cb?: AIStreamCallbacks): ReadableStream;
 
 declare function LangChainStream(callbacks?: AIStreamCallbacks): {
     stream: ReadableStream<Uint8Array>;
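To illustrate the widened signature, here is a minimal sketch of passing an `@anthropic-ai/sdk` stream straight to `AnthropicStream`. Only the `AnthropicStream` and `StreamingTextResponse` calls are grounded in this package; the route handler shape, model name, and prompt constants are illustrative assumptions from the SDK's legacy completions API of that era.

```ts
import Anthropic from "@anthropic-ai/sdk";
import { AnthropicStream, StreamingTextResponse } from "ai";

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

// Hypothetical Edge route handler showing the new overload.
export async function POST(req: Request) {
  const { prompt } = await req.json();

  // `stream: true` yields an AsyncIterable of CompletionChunk-shaped
  // objects, which AnthropicStream now accepts directly.
  const completion = await client.completions.create({
    model: "claude-2",
    prompt: `${Anthropic.HUMAN_PROMPT} ${prompt}${Anthropic.AI_PROMPT}`,
    max_tokens_to_sample: 300,
    stream: true,
  });

  return new StreamingTextResponse(AnthropicStream(completion));
}
```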
package/dist/index.js
CHANGED
@@ -397,14 +397,36 @@ function parseAnthropicStream() {
   let previous = "";
   return (data) => {
     const json = JSON.parse(data);
+    if ("error" in json) {
+      throw new Error(`${json.error.type}: ${json.error.message}`);
+    }
+    if (!("completion" in json)) {
+      return;
+    }
     const text = json.completion;
-    const delta = text.slice(previous.length);
-    previous = text;
-    return delta;
+    if (!previous || text.length > previous.length && text.startsWith(previous)) {
+      const delta = text.slice(previous.length);
+      previous = text;
+      return delta;
+    }
+    return text;
   };
 }
+async function* streamable2(stream) {
+  for await (const chunk of stream) {
+    const text = chunk.completion;
+    if (text)
+      yield text;
+  }
+}
 function AnthropicStream(res, cb) {
-  return AIStream(res, parseAnthropicStream(), cb);
+  if (Symbol.asyncIterator in res) {
+    return readableFromAsyncIterable(streamable2(res)).pipeThrough(
+      createCallbacksTransformer(cb)
+    );
+  } else {
+    return AIStream(res, parseAnthropicStream(), cb);
+  }
 }
 
 // streams/langchain-stream.ts
package/dist/index.mjs
CHANGED
@@ -359,14 +359,36 @@ function parseAnthropicStream() {
   let previous = "";
   return (data) => {
     const json = JSON.parse(data);
+    if ("error" in json) {
+      throw new Error(`${json.error.type}: ${json.error.message}`);
+    }
+    if (!("completion" in json)) {
+      return;
+    }
     const text = json.completion;
-    const delta = text.slice(previous.length);
-    previous = text;
-    return delta;
+    if (!previous || text.length > previous.length && text.startsWith(previous)) {
+      const delta = text.slice(previous.length);
+      previous = text;
+      return delta;
+    }
+    return text;
   };
 }
+async function* streamable2(stream) {
+  for await (const chunk of stream) {
+    const text = chunk.completion;
+    if (text)
+      yield text;
+  }
+}
 function AnthropicStream(res, cb) {
-  return AIStream(res, parseAnthropicStream(), cb);
+  if (Symbol.asyncIterator in res) {
+    return readableFromAsyncIterable(streamable2(res)).pipeThrough(
+      createCallbacksTransformer(cb)
+    );
+  } else {
+    return AIStream(res, parseAnthropicStream(), cb);
+  }
 }
 
 // streams/langchain-stream.ts
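Both builds above (CJS and ESM) carry the same behavioral change: a chunk is treated as a cumulative completion only when it extends the previously seen text, and anything else is passed through verbatim. Below is a minimal sketch of that behavior; the standalone `parse` helper is a hypothetical extraction of the closure inside `parseAnthropicStream`, not an export of the package.

```ts
// Mirrors the delta logic added in parseAnthropicStream above.
const parse = (() => {
  let previous = "";
  return (text: string): string => {
    if (!previous || (text.length > previous.length && text.startsWith(previous))) {
      const delta = text.slice(previous.length); // emit only the new suffix
      previous = text;
      return delta;
    }
    return text; // non-cumulative chunk: pass through unchanged
  };
})();

console.log(parse("Hello"));       // "Hello"
console.log(parse("Hello world")); // " world"
console.log(parse("!"));           // "!" (does not extend "Hello world")
```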
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.1.32",
+  "version": "2.1.34",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -76,7 +76,7 @@
     "eslint": "^7.32.0",
     "jest": "29.2.1",
     "openai-edge": "^1.1.0",
-    "replicate": "^0.
+    "replicate": "^0.16.0",
     "ts-jest": "29.0.3",
     "tsup": "^6.7.0",
     "typescript": "5.1.3",
@@ -109,13 +109,13 @@
   "publishConfig": {
     "access": "public"
   },
-  "homepage": "https://
+  "homepage": "https://sdk.vercel.ai/docs",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/vercel
+    "url": "git+https://github.com/vercel/ai.git"
   },
   "bugs": {
-    "url": "https://github.com/vercel
+    "url": "https://github.com/vercel/ai/issues"
   },
   "keywords": [
     "ai",
package/vue/dist/index.js
CHANGED
@@ -71,6 +71,7 @@ function useChat({
   headers,
   body
 } = {}) {
+  var _a;
   const chatId = id || `chat-${uniqueId++}`;
   const key = `${api}|${chatId}`;
   const { data, mutate: originalMutate } = useSWRV(
@@ -78,8 +79,10 @@
     () => store[key] || initialMessages
   );
   const { data: isLoading, mutate: mutateLoading } = useSWRV(
-    `${chatId}-loading`
+    `${chatId}-loading`,
+    null
   );
+  (_a = isLoading.value) != null ? _a : isLoading.value = false;
   data.value || (data.value = initialMessages);
   const mutate = (data2) => {
     store[key] = data2;
package/vue/dist/index.mjs
CHANGED
@@ -34,6 +34,7 @@ function useChat({
   headers,
   body
 } = {}) {
+  var _a;
   const chatId = id || `chat-${uniqueId++}`;
   const key = `${api}|${chatId}`;
   const { data, mutate: originalMutate } = useSWRV(
@@ -41,8 +42,10 @@
     () => store[key] || initialMessages
   );
   const { data: isLoading, mutate: mutateLoading } = useSWRV(
-    `${chatId}-loading`
+    `${chatId}-loading`,
+    null
   );
+  (_a = isLoading.value) != null ? _a : isLoading.value = false;
   data.value || (data.value = initialMessages);
   const mutate = (data2) => {
     store[key] = data2;
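Both Vue builds receive the same fix: the loading flag's `useSWRV` call now passes an explicit `null` fetcher, and the flag is defaulted to `false` on first use. Below is a presumed source-level equivalent of the compiled output; the `??=` form is an assumption about the pre-compilation source, since that operator is what typically compiles down to the `(_a = ...) != null ? _a : ...` pattern shown above.

```ts
import useSWRV from "swrv";

declare const chatId: string; // provided by the surrounding useChat scope

const { data: isLoading, mutate: mutateLoading } = useSWRV(
  `${chatId}-loading`, // per-chat key for the loading flag
  null                 // no fetcher: the flag is only mutated manually
);
isLoading.value ??= false; // compiles to the `(_a = ...)` line in the diff
```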