ai 2.0.1 → 2.1.0
This diff shows the published contents of these two package versions as they appear in their public registry. It is provided for informational purposes only.
- package/dist/ai-stream.mjs +4 -2
- package/dist/anthropic-stream.mjs +6 -4
- package/dist/chunk-2JQWCLY2.mjs +70 -0
- package/dist/chunk-7KLTYB74.mjs +70 -0
- package/dist/{chunk-TJMME6CL.mjs → chunk-BJMBMGA3.mjs} +12 -2
- package/dist/{chunk-PEYAHBDF.mjs → chunk-KKQRUR3E.mjs} +12 -4
- package/dist/{chunk-NK2CVBLI.mjs → chunk-RBP6ONSV.mjs} +19 -12
- package/dist/{chunk-JGDC3BXD.mjs → chunk-TWW2ODJW.mjs} +13 -3
- package/dist/{chunk-GT4HKF2X.mjs → chunk-U2OQ6HW6.mjs} +13 -5
- package/dist/{chunk-265FSSO4.mjs → chunk-UJV6VDVU.mjs} +9 -3
- package/dist/huggingface-stream.mjs +6 -4
- package/dist/index.mjs +11 -7
- package/dist/index.test.js +562 -6
- package/dist/index.test.mjs +283 -7
- package/dist/langchain-stream.mjs +6 -4
- package/dist/openai-stream.mjs +6 -4
- package/dist/streaming-text-response.mjs +4 -2
- package/package.json +18 -6
- package/vue/dist/chunk-FT26CHLO.mjs +137 -0
- package/vue/dist/chunk-OYI6GFBM.mjs +178 -0
- package/{dist/chunk-2L3ZO4UM.mjs → vue/dist/chunk-WXH4YPZV.mjs} +14 -5
- package/vue/dist/index.d.ts +4 -0
- package/vue/dist/index.js +384 -0
- package/vue/dist/index.mjs +11 -0
- package/vue/dist/types-f862f74a.d.ts +123 -0
- package/vue/dist/use-chat.d.ts +39 -0
- package/vue/dist/use-chat.js +252 -0
- package/vue/dist/use-chat.mjs +7 -0
- package/vue/dist/use-completion.d.ts +38 -0
- package/vue/dist/use-completion.js +212 -0
- package/vue/dist/use-completion.mjs +7 -0
package/dist/index.test.mjs
CHANGED
@@ -1,10 +1,286 @@
-… (seven removed lines from 2.0.1; content not captured in this view)
+import {
+  init_streams,
+  streams_exports
+} from "./chunk-7KLTYB74.mjs";
+import "./chunk-TWW2ODJW.mjs";
+import "./chunk-KKQRUR3E.mjs";
+import "./chunk-U2OQ6HW6.mjs";
+import "./chunk-BJMBMGA3.mjs";
+import "./chunk-UJV6VDVU.mjs";
+import "./chunk-RBP6ONSV.mjs";
+import {
+  __async,
+  __commonJS,
+  __esm,
+  __toCommonJS
+} from "./chunk-2JQWCLY2.mjs";
+
+// tests/snapshots/openai-chat.ts
+var openai_chat_default;
+var init_openai_chat = __esm({
+  "tests/snapshots/openai-chat.ts"() {
+    "use strict";
+    openai_chat_default = [
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [
+          {
+            delta: { role: "assistant" },
+            index: 0,
+            finish_reason: null
+          }
+        ]
+      },
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [
+          {
+            delta: { content: "Hello" },
+            index: 0,
+            finish_reason: null
+          }
+        ]
+      },
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [{ delta: { content: "," }, index: 0, finish_reason: null }]
+      },
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [
+          {
+            delta: { content: " world" },
+            index: 0,
+            finish_reason: null
+          }
+        ]
+      },
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [{ delta: { content: "." }, index: 0, finish_reason: null }]
+      },
+      {
+        id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+        object: "chat.completion.chunk",
+        created: 1686901302,
+        model: "gpt-3.5-turbo-0301",
+        choices: [{ delta: {}, index: 0, finish_reason: "stop" }]
+      }
+    ];
+  }
+});
+
+// tests/utils/mock-service.ts
+import { createServer } from "http";
+function flushDataToResponse(res, chunks, suffix) {
+  return __async(this, null, function* () {
+    let resolve = () => {
+    };
+    let waitForDrain = new Promise((res2) => resolve = res2);
+    res.addListener("drain", () => {
+      resolve();
+      waitForDrain = new Promise((res2) => resolve = res2);
     });
+    try {
+      for (const item of chunks) {
+        const data = `data: ${JSON.stringify(item.value)}
+
+`;
+        const ok = res.write(data);
+        if (!ok) {
+          yield waitForDrain;
+        }
+        yield new Promise((r) => setTimeout(r, 100));
+      }
+      if (suffix) {
+        const data = `data: ${suffix}
+
+`;
+        res.write(data);
+      }
+    } catch (e) {
+    }
+    res.end();
   });
+}
+var setup;
+var init_mock_service = __esm({
+  "tests/utils/mock-service.ts"() {
+    "use strict";
+    init_openai_chat();
+    setup = () => {
+      let recentFlushed = [];
+      const server = createServer((req, res) => {
+        const service = req.headers["x-mock-service"] || "openai";
+        const type = req.headers["x-mock-type"] || "chat";
+        switch (type) {
+          case "chat":
+            switch (service) {
+              case "openai":
+                res.writeHead(200, {
+                  "Content-Type": "text/event-stream",
+                  "Cache-Control": "no-cache",
+                  Connection: "keep-alive"
+                });
+                res.flushHeaders();
+                recentFlushed = [];
+                flushDataToResponse(
+                  res,
+                  openai_chat_default.map(
+                    (value) => new Proxy(
+                      { value },
+                      {
+                        get(target) {
+                          recentFlushed.push(target.value);
+                          return target.value;
+                        }
+                      }
+                    )
+                  ),
+                  "[DONE]"
+                );
+                break;
+              default:
+                throw new Error(`Unknown service: ${service}`);
+            }
+            break;
+          default:
+            throw new Error(`Unknown type: ${type}`);
+        }
+      });
+      server.listen(3030);
+      return {
+        port: 3030,
+        api: "http://localhost:3030",
+        teardown: () => {
+          server.close();
+        },
+        getRecentFlushed: () => recentFlushed
+      };
+    };
+  }
+});
+
+// tests/utils/mock-client.ts
+var createClient;
+var init_mock_client = __esm({
+  "tests/utils/mock-client.ts"() {
+    "use strict";
+    createClient = (response) => {
+      return {
+        readAll() {
+          return __async(this, null, function* () {
+            if (!response.body) {
+              throw new Error("Response body is not readable");
+            }
+            let chunks = [];
+            const reader = response.body.getReader();
+            while (true) {
+              const { done, value } = yield reader.read();
+              if (done) {
+                break;
+              }
+              chunks.push(new TextDecoder().decode(value));
+            }
+            return chunks;
+          });
+        },
+        readAndAbort(controller) {
+          return __async(this, null, function* () {
+            if (!response.body) {
+              throw new Error("Response body is not readable");
+            }
+            let chunks = [];
+            const reader = response.body.getReader();
+            const { done, value } = yield reader.read();
+            if (!done) {
+              chunks.push(new TextDecoder().decode(value));
+            }
+            controller.abort();
+            reader.cancel();
+            return chunks;
+          });
+        }
+      };
+    };
+  }
+});
+
+// streams/index.test.ts
+var require_index_test = __commonJS({
+  "streams/index.test.ts"(exports) {
+    init_mock_service();
+    init_mock_client();
+    describe("AIStream", () => {
+      let server;
+      beforeAll(() => {
+        server = setup();
+      });
+      afterAll(() => {
+        server.teardown();
+      });
+      describe("OpenAIStream", () => {
+        if (typeof Response === "undefined") {
+          it("should skip this test on Node 16 because it doesn't support `Response`", () => {
+          });
+        } else {
+          const { OpenAIStream, StreamingTextResponse } = (init_streams(), __toCommonJS(streams_exports));
+          it("should be able to parse SSE and receive the streamed response", () => __async(exports, null, function* () {
+            const stream = OpenAIStream(
+              yield fetch(server.api, {
+                headers: {
+                  "x-mock-service": "openai",
+                  "x-mock-type": "chat"
+                }
+              })
+            );
+            const response = new StreamingTextResponse(stream);
+            const client = createClient(response);
+            const chunks = yield client.readAll();
+            expect(JSON.stringify(chunks)).toMatchInlineSnapshot(
+              `"["Hello",","," world","."]"`
+            );
+            expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
+              `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":","},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" world"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"."},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}]"`
+            );
+          }));
+          it("should handle backpressure on the server", () => __async(exports, null, function* () {
+            const controller = new AbortController();
+            const stream = OpenAIStream(
+              yield fetch(server.api, {
+                headers: {
+                  "x-mock-service": "openai",
+                  "x-mock-type": "chat"
+                },
+                signal: controller.signal
+              })
+            );
+            const response = new StreamingTextResponse(stream);
+            const client = createClient(response);
+            const chunks = yield client.readAndAbort(controller);
+            expect(JSON.stringify(chunks)).toMatchInlineSnapshot(`"["Hello"]"`);
+            expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
+              `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]}]"`
+            );
+          }));
+        }
+      });
+    });
+  }
 });
+export default require_index_test();
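The new test suite doubles as a usage reference: wrap a streaming fetch Response in OpenAIStream, hand the result to StreamingTextResponse, and read decoded text off the body. A minimal sketch of the same consumption path outside jest (the `:3030` address and `x-mock-*` headers come from tests/utils/mock-service.ts above; the rest is illustrative):

```js
// Sketch: reading the streamed text the way streams/index.test.ts does.
import { OpenAIStream, StreamingTextResponse } from "ai";

async function readStreamedText() {
  const res = await fetch("http://localhost:3030", {
    headers: { "x-mock-service": "openai", "x-mock-type": "chat" }
  });
  // OpenAIStream parses the SSE frames and emits only the delta text.
  const response = new StreamingTextResponse(OpenAIStream(res));
  const reader = response.body.getReader();
  let text = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    text += new TextDecoder().decode(value);
  }
  return text; // "Hello, world." given the openai-chat snapshot
}
```

The backpressure test works the same way but aborts after the first read, which is why its server-side snapshot records only the first two flushed chunks.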
package/dist/langchain-stream.mjs
CHANGED
@@ -1,8 +1,10 @@
 import {
-  LangChainStream
-…
-…
-import "./chunk-
+  LangChainStream,
+  init_langchain_stream
+} from "./chunk-U2OQ6HW6.mjs";
+import "./chunk-UJV6VDVU.mjs";
+import "./chunk-2JQWCLY2.mjs";
+init_langchain_stream();
 export {
   LangChainStream
 };
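The same two-line change appears in each entry point: the chunk now exports an `init_*` function, and the entry calls it once at load time instead of relying on import-time side effects. This is esbuild's lazy-module wrapper; roughly, the `__esm` helper imported from chunk-2JQWCLY2.mjs behaves like this simplified sketch (the real generated helper differs in detail):

```js
// Simplified sketch of esbuild's __esm helper: the wrapped module body
// runs once, on the first init_*() call, instead of at import time.
var __esm = (fn, res) => function init() {
  // First call: invoke the single module-body method, then clear fn so
  // later calls are no-ops that return the cached result.
  return fn && (res = (0, fn[Object.keys(fn)[0]])(fn = 0)), res;
};
```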
package/dist/openai-stream.mjs
CHANGED
@@ -1,8 +1,10 @@
 import {
-  OpenAIStream
-…
-…
-import "./chunk-
+  OpenAIStream,
+  init_openai_stream
+} from "./chunk-BJMBMGA3.mjs";
+import "./chunk-UJV6VDVU.mjs";
+import "./chunk-2JQWCLY2.mjs";
+init_openai_stream();
 export {
   OpenAIStream
 };
package/dist/streaming-text-response.mjs
CHANGED
@@ -1,8 +1,10 @@
 import {
   StreamingTextResponse,
+  init_streaming_text_response,
   streamToResponse
-} from "./chunk-
-import "./chunk-
+} from "./chunk-RBP6ONSV.mjs";
+import "./chunk-2JQWCLY2.mjs";
+init_streaming_text_response();
 export {
   StreamingTextResponse,
   streamToResponse
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.0.1",
+  "version": "2.1.0",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -9,7 +9,8 @@
   "files": [
     "dist/**/*",
     "react/dist/**/*",
-    "svelte/dist/**/*"
+    "svelte/dist/**/*",
+    "vue/dist/**/*"
   ],
   "exports": {
     "./package.json": "./package.json",
@@ -30,6 +31,12 @@
       "import": "./svelte/dist/index.mjs",
       "module": "./svelte/dist/index.mjs",
       "require": "./svelte/dist/index.js"
+    },
+    "./vue": {
+      "types": "./vue/dist/index.d.ts",
+      "import": "./vue/dist/index.mjs",
+      "module": "./vue/dist/index.mjs",
+      "require": "./vue/dist/index.js"
     }
   },
   "jest": {
@@ -40,7 +47,8 @@
     "eventsource-parser": "1.0.0",
     "nanoid": "^3.3.6",
     "sswr": "^1.10.0",
-    "swr": "2.1.5"
+    "swr": "2.1.5",
+    "swrv": "1.0.3"
   },
   "devDependencies": {
     "@edge-runtime/jest-environment": "1.1.0-beta.31",
@@ -58,7 +66,8 @@
   },
   "peerDependencies": {
     "react": "^18.0.0",
-    "svelte": "^3.29.0"
+    "svelte": "^3.29.0",
+    "vue": "^3.3.4"
   },
   "peerDependenciesMeta": {
     "react": {
@@ -66,6 +75,9 @@
     },
     "svelte": {
       "optional": true
+    },
+    "vue": {
+      "optional": true
     }
   },
   "engines": {
@@ -76,11 +88,11 @@
   },
   "scripts": {
     "build": "tsup",
-    "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist && rm -rf react/dist && rm -rf svelte/dist",
+    "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist && rm -rf react/dist && rm -rf svelte/dist && rm -rf vue/dist",
     "dev": "tsup --watch",
     "lint": "eslint \"./**/*.ts*\"",
     "type-check": "tsc --noEmit",
     "prettier-check": "prettier --check \"./**/*.ts*\"",
-    "test": "jest --env @edge-runtime/jest-environment .test.ts && jest --env node .test.ts"
+    "test": "jest --forceExit --env @edge-runtime/jest-environment .test.ts && jest --forceExit --env node .test.ts"
   }
 }
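Together with the new vue/dist files, the `./vue` export entry and the `swrv`/`vue` dependencies add first-party Vue composables importable from the `ai/vue` subpath. A minimal usage sketch for a Vue 3 `<script setup>` block, assuming a streaming `/api/completion` route (the field names match the `useCompletion` chunk shown below):

```js
// Sketch: <script setup> body of a Vue 3 component using the new composable.
import { useCompletion } from "ai/vue";

const { completion, input, handleSubmit, isLoading, stop } = useCompletion({
  api: "/api/completion"
});
// `input` and `completion` are refs: bind `input` with v-model, interpolate
// {{ completion }} in the template, and wire `handleSubmit` to @submit.
```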
package/vue/dist/chunk-FT26CHLO.mjs
ADDED
@@ -0,0 +1,137 @@
+import {
+  __async,
+  __spreadValues,
+  decodeAIStreamChunk
+} from "./chunk-WXH4YPZV.mjs";
+
+// vue/use-completion.ts
+import swrv from "swrv";
+import { ref } from "vue";
+var uniqueId = 0;
+var useSWRV = swrv.default || swrv;
+var store = {};
+function useCompletion({
+  api = "/api/completion",
+  id,
+  initialCompletion = "",
+  initialInput = "",
+  headers,
+  body,
+  onResponse,
+  onFinish,
+  onError
+} = {}) {
+  const completionId = id || `completion-${uniqueId++}`;
+  const key = `${api}|${completionId}`;
+  const { data, mutate: originalMutate } = useSWRV(
+    key,
+    () => store[key] || initialCompletion
+  );
+  data.value || (data.value = initialCompletion);
+  const mutate = (data2) => {
+    store[key] = data2;
+    return originalMutate();
+  };
+  const completion = data;
+  const error = ref(void 0);
+  const isLoading = ref(false);
+  let abortController = null;
+  function triggerRequest(prompt) {
+    return __async(this, null, function* () {
+      try {
+        isLoading.value = true;
+        abortController = new AbortController();
+        mutate("");
+        const res = yield fetch(api, {
+          method: "POST",
+          body: JSON.stringify(__spreadValues({
+            prompt
+          }, body)),
+          headers: headers || {},
+          signal: abortController.signal
+        }).catch((err) => {
+          throw err;
+        });
+        if (onResponse) {
+          try {
+            yield onResponse(res);
+          } catch (err) {
+            throw err;
+          }
+        }
+        if (!res.ok) {
+          throw new Error(
+            (yield res.text()) || "Failed to fetch the chat response."
+          );
+        }
+        if (!res.body) {
+          throw new Error("The response body is empty.");
+        }
+        let result = "";
+        const reader = res.body.getReader();
+        while (true) {
+          const { done, value } = yield reader.read();
+          if (done) {
+            break;
+          }
+          result += decodeAIStreamChunk(value);
+          mutate(result);
+          if (abortController === null) {
+            reader.cancel();
+            break;
+          }
+        }
+        if (onFinish) {
+          onFinish(prompt, result);
+        }
+        abortController = null;
+        return result;
+      } catch (err) {
+        if (err.name === "AbortError") {
+          abortController = null;
+          return null;
+        }
+        if (onError && error instanceof Error) {
+          onError(error);
+        }
+        error.value = err;
+      } finally {
+        isLoading.value = false;
+      }
+    });
+  }
+  const complete = (prompt) => __async(this, null, function* () {
+    return triggerRequest(prompt);
+  });
+  const stop = () => {
+    if (abortController) {
+      abortController.abort();
+      abortController = null;
+    }
+  };
+  const setCompletion = (completion2) => {
+    mutate(completion2);
+  };
+  const input = ref(initialInput);
+  const handleSubmit = (e) => {
+    e.preventDefault();
+    const inputValue = input.value;
+    if (!inputValue)
+      return;
+    return complete(inputValue);
+  };
+  return {
+    completion,
+    complete,
+    error,
+    stop,
+    setCompletion,
+    input,
+    handleSubmit,
+    isLoading
+  };
+}
+
+export {
+  useCompletion
+};
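`useCompletion` POSTs `{ prompt, ...body }` to its `api` route and incrementally renders whatever text the route streams back, so any handler that returns a `StreamingTextResponse` pairs with it. A hypothetical route handler sketch (the upstream OpenAI call, model name, and `OPENAI_API_KEY` variable are illustrative, not part of this package):

```js
// Sketch of an /api/completion handler that useCompletion can consume.
import { OpenAIStream, StreamingTextResponse } from "ai";

export async function POST(req) {
  const { prompt } = await req.json();
  // Illustrative upstream request; any SSE stream in OpenAI's chat format works.
  const upstream = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}` // assumed env var
    },
    body: JSON.stringify({
      model: "gpt-3.5-turbo",
      stream: true,
      messages: [{ role: "user", content: prompt }]
    })
  });
  // OpenAIStream extracts the delta text; StreamingTextResponse streams it back.
  return new StreamingTextResponse(OpenAIStream(upstream));
}
```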