ai 2.1.10 → 2.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/index.d.ts +9 -2
- package/dist/index.js +41 -6
- package/dist/index.mjs +41 -6
- package/package.json +3 -3
package/README.md
CHANGED
@@ -5,8 +5,8 @@ The Vercel AI SDK is **a library for building edge-ready AI-powered streaming text and chat UIs**.

 ## Features

 - [SWR](https://swr.vercel.app)-powered React, Svelte and Vue helpers for streaming text responses and building chat and completion UIs
-- First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), and [
-- [Edge Runtime](https://edge-runtime.vercel.app/)
+- First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), and [Hugging Face](https://huggingface.co)
+- Node.js, Serverless, and [Edge Runtime](https://edge-runtime.vercel.app/) support
 - Callbacks for saving completed streaming responses to a database (in the same request)

 ## Installation
package/dist/index.d.ts
CHANGED
@@ -99,8 +99,15 @@ declare function LangChainStream(callbacks?: AIStreamCallbacks): {
     stream: ReadableStream<Uint8Array>;
     handlers: {
         handleLLMNewToken: (token: string) => Promise<void>;
-
-
+        handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
+        handleLLMEnd: (_output: any, runId: string) => Promise<void>;
+        handleLLMError: (e: Error, runId: string) => Promise<void>;
+        handleChainStart: (_chain: any, _inputs: any, runId: string) => Promise<void>;
+        handleChainEnd: (_outputs: any, runId: string) => Promise<void>;
+        handleChainError: (e: Error, runId: string) => Promise<void>;
+        handleToolStart: (_tool: any, _input: string, runId: string) => Promise<void>;
+        handleToolEnd: (_output: string, runId: string) => Promise<void>;
+        handleToolError: (e: Error, runId: string) => Promise<void>;
     };
 };
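With the expanded `handlers` surface, the object returned by `LangChainStream` can be handed to LangChain as a callback handler and will now observe LLM, chain, and tool runs. A minimal sketch of the usual wiring in a streaming route handler; it assumes the `Message` type and `StreamingTextResponse` exported by this package, plus langchain's `ChatOpenAI`, `HumanChatMessage`, and `AIChatMessage` APIs as published around this release (import paths may differ in other langchain versions):

// Hedged sketch: passing LangChainStream's handlers to a LangChain chat
// model call. Not the package's own example code.
import { LangChainStream, StreamingTextResponse, type Message } from 'ai'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { AIChatMessage, HumanChatMessage } from 'langchain/schema'

export async function POST(req: Request) {
  const { messages } = (await req.json()) as { messages: Message[] }

  // stream goes back to the client; handlers go to LangChain
  const { stream, handlers } = LangChainStream()

  const llm = new ChatOpenAI({ streaming: true })

  // Fire-and-forget: the handlers close or abort the stream once every
  // tracked run has ended, so the call itself is not awaited.
  llm
    .call(
      messages.map(m =>
        m.role === 'user'
          ? new HumanChatMessage(m.content)
          : new AIChatMessage(m.content)
      ),
      {},
      [handlers]
    )
    .catch(console.error)

  return new StreamingTextResponse(stream)
}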
package/dist/index.js
CHANGED
@@ -286,6 +286,22 @@ function AnthropicStream(res, cb) {
 function LangChainStream(callbacks) {
   const stream = new TransformStream();
   const writer = stream.writable.getWriter();
+  const runs = /* @__PURE__ */ new Set();
+  const handleError = (e, runId) => __async(this, null, function* () {
+    runs.delete(runId);
+    yield writer.ready;
+    yield writer.abort(e);
+  });
+  const handleStart = (runId) => __async(this, null, function* () {
+    runs.add(runId);
+  });
+  const handleEnd = (runId) => __async(this, null, function* () {
+    runs.delete(runId);
+    if (runs.size === 0) {
+      yield writer.ready;
+      yield writer.close();
+    }
+  });
   return {
     stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
     handlers: {
@@ -293,13 +309,32 @@ function LangChainStream(callbacks) {
         yield writer.ready;
         yield writer.write(token);
       }),
-
-
-        yield writer.close();
+      handleLLMStart: (_llm, _prompts, runId) => __async(this, null, function* () {
+        handleStart(runId);
       }),
-
-        yield
-
+      handleLLMEnd: (_output, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleLLMError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
+      }),
+      handleChainStart: (_chain, _inputs, runId) => __async(this, null, function* () {
+        handleStart(runId);
+      }),
+      handleChainEnd: (_outputs, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleChainError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
+      }),
+      handleToolStart: (_tool, _input, runId) => __async(this, null, function* () {
+        handleStart(runId);
+      }),
+      handleToolEnd: (_output, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleToolError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
       })
     }
   };
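The substance of the change: the old code closed the writer in `handleChainEnd` and aborted it in `handleLLMError`, which could end the stream while other runs were still producing tokens. The new code keeps a `Set` of active run IDs and only closes the writer once that set is empty. A distilled sketch of the run-tracking pattern in TypeScript (illustrative only; the dist code above is the authoritative version):

// Run tracking as introduced in this release, rewritten as a small
// self-contained sketch.
const runs = new Set<string>()
const stream = new TransformStream<string, string>()
const writer = stream.writable.getWriter()

function handleStart(runId: string): void {
  runs.add(runId) // a new LLM/chain/tool run is in flight
}

async function handleEnd(runId: string): Promise<void> {
  runs.delete(runId)
  if (runs.size === 0) {
    // Only close once the *last* run finishes; a chain can spawn
    // several nested runs that all share this one stream.
    await writer.ready
    await writer.close()
  }
}

async function handleError(e: Error, runId: string): Promise<void> {
  runs.delete(runId)
  await writer.ready
  await writer.abort(e) // surface the failure to the stream's reader
}

Because every `*Start` handler funnels into the same set, a tool run spawned by a chain keeps the stream open even after the chain's own LLM run ends; only an error aborts the stream immediately, regardless of remaining runs.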
package/dist/index.mjs
CHANGED
@@ -253,6 +253,22 @@ function AnthropicStream(res, cb) {
 function LangChainStream(callbacks) {
   const stream = new TransformStream();
   const writer = stream.writable.getWriter();
+  const runs = /* @__PURE__ */ new Set();
+  const handleError = (e, runId) => __async(this, null, function* () {
+    runs.delete(runId);
+    yield writer.ready;
+    yield writer.abort(e);
+  });
+  const handleStart = (runId) => __async(this, null, function* () {
+    runs.add(runId);
+  });
+  const handleEnd = (runId) => __async(this, null, function* () {
+    runs.delete(runId);
+    if (runs.size === 0) {
+      yield writer.ready;
+      yield writer.close();
+    }
+  });
   return {
     stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
     handlers: {
@@ -260,13 +276,32 @@ function LangChainStream(callbacks) {
         yield writer.ready;
         yield writer.write(token);
       }),
-
-
-        yield writer.close();
+      handleLLMStart: (_llm, _prompts, runId) => __async(this, null, function* () {
+        handleStart(runId);
       }),
-
-        yield
-
+      handleLLMEnd: (_output, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleLLMError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
+      }),
+      handleChainStart: (_chain, _inputs, runId) => __async(this, null, function* () {
+        handleStart(runId);
+      }),
+      handleChainEnd: (_outputs, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleChainError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
+      }),
+      handleToolStart: (_tool, _input, runId) => __async(this, null, function* () {
+        handleStart(runId);
+      }),
+      handleToolEnd: (_output, runId) => __async(this, null, function* () {
+        yield handleEnd(runId);
+      }),
+      handleToolError: (e, runId) => __async(this, null, function* () {
+        yield handleError(e, runId);
       })
     }
   };
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.1.10",
+  "version": "2.1.11",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -62,8 +62,8 @@
     "ts-jest": "29.0.3",
     "tsup": "^6.7.0",
     "typescript": "5.1.3",
-    "
-    "
+    "@vercel/ai-tsconfig": "0.0.0",
+    "eslint-config-vercel-ai": "0.0.0"
   },
   "peerDependencies": {
     "react": "^18.2.0",
|