ai 2.0.0 → 2.1.0
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/ai-stream.mjs +4 -2
- package/dist/anthropic-stream.mjs +6 -4
- package/dist/chunk-2JQWCLY2.mjs +70 -0
- package/dist/chunk-7KLTYB74.mjs +70 -0
- package/dist/{chunk-TJMME6CL.mjs → chunk-BJMBMGA3.mjs} +12 -2
- package/dist/{chunk-PEYAHBDF.mjs → chunk-KKQRUR3E.mjs} +12 -4
- package/dist/{chunk-NK2CVBLI.mjs → chunk-RBP6ONSV.mjs} +19 -12
- package/dist/{chunk-JGDC3BXD.mjs → chunk-TWW2ODJW.mjs} +13 -3
- package/dist/{chunk-EZJ7FC5E.mjs → chunk-U2OQ6HW6.mjs} +14 -6
- package/dist/{chunk-265FSSO4.mjs → chunk-UJV6VDVU.mjs} +9 -3
- package/dist/huggingface-stream.mjs +6 -4
- package/dist/index.js +1 -1
- package/dist/index.mjs +11 -7
- package/dist/index.test.js +562 -6
- package/dist/index.test.mjs +283 -7
- package/dist/langchain-stream.d.ts +1 -1
- package/dist/langchain-stream.js +1 -1
- package/dist/langchain-stream.mjs +6 -4
- package/dist/openai-stream.mjs +6 -4
- package/dist/streaming-text-response.mjs +4 -2
- package/package.json +18 -6
- package/vue/dist/chunk-FT26CHLO.mjs +137 -0
- package/vue/dist/chunk-OYI6GFBM.mjs +178 -0
- package/{dist/chunk-2L3ZO4UM.mjs → vue/dist/chunk-WXH4YPZV.mjs} +14 -5
- package/vue/dist/index.d.ts +4 -0
- package/vue/dist/index.js +384 -0
- package/vue/dist/index.mjs +11 -0
- package/vue/dist/types-f862f74a.d.ts +123 -0
- package/vue/dist/use-chat.d.ts +39 -0
- package/vue/dist/use-chat.js +252 -0
- package/vue/dist/use-chat.mjs +7 -0
- package/vue/dist/use-completion.d.ts +38 -0
- package/vue/dist/use-completion.js +212 -0
- package/vue/dist/use-completion.mjs +7 -0
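The headline addition in this release is the new Vue entry point: useChat and useCompletion composables shipped under package/vue/dist, alongside the test coverage added to package/dist/index.test.js (shown below). For orientation, here is a minimal sketch of how the new composables are presumably consumed. The import path and return shape are assumptions based on the module names above and the package's existing React hooks, not on code shown in this diff; verify against vue/dist/use-chat.d.ts before relying on them.

// Hypothetical usage inside a Vue component's <script setup lang="ts"> block.
// The 'ai/vue' entry point and the return shape of useChat are assumptions
// inferred from the new vue/dist/use-chat module listed above.
import { useChat } from 'ai/vue'

const { messages, input, handleSubmit } = useChat({
  api: '/api/chat' // assumed option, carried over from the React hook
})
// messages and input are expected to be reactive values to bind in the
// template; handleSubmit is wired to the chat form's submit event.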
package/dist/index.test.js
CHANGED
@@ -1,12 +1,568 @@
 "use strict";
+var __defProp = Object.defineProperty;
+var __defProps = Object.defineProperties;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __propIsEnum = Object.prototype.propertyIsEnumerable;
+var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues = (a, b) => {
+  for (var prop in b || (b = {}))
+    if (__hasOwnProp.call(b, prop))
+      __defNormalProp(a, prop, b[prop]);
+  if (__getOwnPropSymbols)
+    for (var prop of __getOwnPropSymbols(b)) {
+      if (__propIsEnum.call(b, prop))
+        __defNormalProp(a, prop, b[prop]);
+    }
+  return a;
+};
+var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
+var __esm = (fn, res) => function __init() {
+  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
+};
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var __async = (__this, __arguments, generator) => {
+  return new Promise((resolve, reject) => {
+    var fulfilled = (value) => {
+      try {
+        step(generator.next(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var rejected = (value) => {
+      try {
+        step(generator.throw(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+    step((generator = generator.apply(__this, __arguments)).next());
+  });
+};
 
-// streams/
-
-
+// streams/ai-stream.ts
+function createEventStreamTransformer(customParser) {
+  const decoder = new TextDecoder();
+  let parser;
+  return new TransformStream({
+    start(controller) {
+      return __async(this, null, function* () {
+        function onParse(event) {
+          if (event.type === "event") {
+            const data = event.data;
+            if (data === "[DONE]") {
+              controller.terminate();
+              return;
+            }
+            const message = customParser(data);
+            if (message)
+              controller.enqueue(message);
+          }
+        }
+        parser = (0, import_eventsource_parser.createParser)(onParse);
+      });
+    },
+    transform(chunk) {
+      parser.feed(decoder.decode(chunk));
+    }
+  });
+}
+function createCallbacksTransformer(callbacks) {
+  const encoder = new TextEncoder();
+  let fullResponse = "";
+  const { onStart, onToken, onCompletion } = callbacks || {};
+  return new TransformStream({
+    start() {
+      return __async(this, null, function* () {
+        if (onStart)
+          yield onStart();
+      });
+    },
+    transform(message, controller) {
+      return __async(this, null, function* () {
+        controller.enqueue(encoder.encode(message));
+        if (onToken)
+          yield onToken(message);
+        if (onCompletion)
+          fullResponse += message;
+      });
+    },
+    flush() {
+      return __async(this, null, function* () {
+        yield onCompletion == null ? void 0 : onCompletion(fullResponse);
+      });
+    }
   });
-
-
-
+}
+function trimStartOfStreamHelper() {
+  let start = true;
+  return (text) => {
+    if (start)
+      text = text.trimStart();
+    if (text)
+      start = false;
+    return text;
+  };
+}
+function AIStream(res, customParser, callbacks) {
+  if (!res.ok) {
+    throw new Error(
+      `Failed to convert the response to stream. Received status code: ${res.status}.`
+    );
+  }
+  const stream = res.body || new ReadableStream({
+    start(controller) {
+      controller.close();
+    }
+  });
+  return stream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
+}
+var import_eventsource_parser;
+var init_ai_stream = __esm({
+  "streams/ai-stream.ts"() {
+    "use strict";
+    import_eventsource_parser = require("eventsource-parser");
+  }
+});
+
+// streams/openai-stream.ts
+function parseOpenAIStream() {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return (data) => {
+    var _a, _b, _c, _d, _e;
+    const json = JSON.parse(data);
+    const text = trimStartOfStream(
+      (_e = (_d = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) != null ? _d : (_c = json.choices[0]) == null ? void 0 : _c.text) != null ? _e : ""
+    );
+    return text;
+  };
+}
+function OpenAIStream(res, cb) {
+  return AIStream(res, parseOpenAIStream(), cb);
+}
+var init_openai_stream = __esm({
+  "streams/openai-stream.ts"() {
+    "use strict";
+    init_ai_stream();
+  }
+});
+
+// streams/streaming-text-response.ts
+function streamToResponse(res, response, init) {
+  response.writeHead((init == null ? void 0 : init.status) || 200, __spreadValues({
+    "Content-Type": "text/plain; charset=utf-8"
+  }, init == null ? void 0 : init.headers));
+  const reader = res.getReader();
+  function read() {
+    reader.read().then(({ done, value }) => {
+      if (done) {
+        response.end();
+        return;
+      }
+      response.write(value);
+      read();
+    });
+  }
+  read();
+}
+var StreamingTextResponse;
+var init_streaming_text_response = __esm({
+  "streams/streaming-text-response.ts"() {
+    "use strict";
+    StreamingTextResponse = class extends Response {
+      constructor(res, init) {
+        super(res, __spreadProps(__spreadValues({}, init), {
+          status: 200,
+          headers: __spreadValues({
+            "Content-Type": "text/plain; charset=utf-8"
+          }, init == null ? void 0 : init.headers)
+        }));
+      }
+    };
+  }
+});
+
+// streams/huggingface-stream.ts
+function createParser2(res) {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream({
+    pull(controller) {
+      return __async(this, null, function* () {
+        var _a2, _b;
+        const { value, done } = yield res.next();
+        if (done) {
+          controller.close();
+          return;
+        }
+        const text = trimStartOfStream((_b = (_a2 = value.token) == null ? void 0 : _a2.text) != null ? _b : "");
+        if (!text)
+          return;
+        if (value.generated_text != null && value.generated_text.length > 0) {
+          controller.close();
+          return;
+        }
+        if (text === "</s>" || text === "<|endoftext|>") {
+          controller.close();
+        } else {
+          controller.enqueue(text);
+        }
+      });
+    }
+  });
+}
+function HuggingFaceStream(res, callbacks) {
+  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks));
+}
+var init_huggingface_stream = __esm({
+  "streams/huggingface-stream.ts"() {
+    "use strict";
+    init_ai_stream();
+  }
+});
+
+// streams/anthropic-stream.ts
+function parseAnthropicStream() {
+  let previous = "";
+  return (data) => {
+    const json = JSON.parse(data);
+    const text = json.completion;
+    const delta = text.slice(previous.length);
+    previous = text;
+    return delta;
+  };
+}
+function AnthropicStream(res, cb) {
+  return AIStream(res, parseAnthropicStream(), cb);
+}
+var init_anthropic_stream = __esm({
+  "streams/anthropic-stream.ts"() {
+    "use strict";
+    init_ai_stream();
+  }
+});
+
+// streams/langchain-stream.ts
+function LangChainStream(callbacks) {
+  const stream = new TransformStream();
+  const writer = stream.writable.getWriter();
+  return {
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
+    handlers: {
+      handleLLMNewToken: (token) => __async(this, null, function* () {
+        yield writer.ready;
+        yield writer.write(token);
+      }),
+      handleChainEnd: () => __async(this, null, function* () {
+        yield writer.ready;
+        yield writer.close();
+      }),
+      handleLLMError: (e) => __async(this, null, function* () {
+        yield writer.ready;
+        yield writer.abort(e);
+      })
+    }
+  };
+}
+var init_langchain_stream = __esm({
+  "streams/langchain-stream.ts"() {
+    "use strict";
+    init_ai_stream();
+  }
+});
+
+// shared/types.ts
+var init_types = __esm({
+  "shared/types.ts"() {
+    "use strict";
+  }
+});
+
+// streams/index.ts
+var streams_exports = {};
+__export(streams_exports, {
+  AIStream: () => AIStream,
+  AnthropicStream: () => AnthropicStream,
+  HuggingFaceStream: () => HuggingFaceStream,
+  LangChainStream: () => LangChainStream,
+  OpenAIStream: () => OpenAIStream,
+  StreamingTextResponse: () => StreamingTextResponse,
+  createCallbacksTransformer: () => createCallbacksTransformer,
+  createEventStreamTransformer: () => createEventStreamTransformer,
+  streamToResponse: () => streamToResponse,
+  trimStartOfStreamHelper: () => trimStartOfStreamHelper
+});
+var init_streams = __esm({
+  "streams/index.ts"() {
+    "use strict";
+    init_ai_stream();
+    init_openai_stream();
+    init_streaming_text_response();
+    init_huggingface_stream();
+    init_anthropic_stream();
+    init_langchain_stream();
+    init_types();
+  }
+});
+
+// tests/utils/mock-service.ts
+var import_node_http = require("http");
+
+// tests/snapshots/openai-chat.ts
+var openai_chat_default = [
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [
+      {
+        delta: { role: "assistant" },
+        index: 0,
+        finish_reason: null
+      }
+    ]
+  },
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [
+      {
+        delta: { content: "Hello" },
+        index: 0,
+        finish_reason: null
+      }
+    ]
+  },
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [{ delta: { content: "," }, index: 0, finish_reason: null }]
+  },
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [
+      {
+        delta: { content: " world" },
+        index: 0,
+        finish_reason: null
+      }
+    ]
+  },
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [{ delta: { content: "." }, index: 0, finish_reason: null }]
+  },
+  {
+    id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
+    object: "chat.completion.chunk",
+    created: 1686901302,
+    model: "gpt-3.5-turbo-0301",
+    choices: [{ delta: {}, index: 0, finish_reason: "stop" }]
+  }
+];
+
+// tests/utils/mock-service.ts
+function flushDataToResponse(res, chunks, suffix) {
+  return __async(this, null, function* () {
+    let resolve = () => {
+    };
+    let waitForDrain = new Promise((res2) => resolve = res2);
+    res.addListener("drain", () => {
+      resolve();
+      waitForDrain = new Promise((res2) => resolve = res2);
     });
+    try {
+      for (const item of chunks) {
+        const data = `data: ${JSON.stringify(item.value)}
+
+`;
+        const ok = res.write(data);
+        if (!ok) {
+          yield waitForDrain;
+        }
+        yield new Promise((r) => setTimeout(r, 100));
+      }
+      if (suffix) {
+        const data = `data: ${suffix}
+
+`;
+        res.write(data);
+      }
+    } catch (e) {
+    }
+    res.end();
+  });
+}
+var setup = () => {
+  let recentFlushed = [];
+  const server = (0, import_node_http.createServer)((req, res) => {
+    const service = req.headers["x-mock-service"] || "openai";
+    const type = req.headers["x-mock-type"] || "chat";
+    switch (type) {
+      case "chat":
+        switch (service) {
+          case "openai":
+            res.writeHead(200, {
+              "Content-Type": "text/event-stream",
+              "Cache-Control": "no-cache",
+              Connection: "keep-alive"
+            });
+            res.flushHeaders();
+            recentFlushed = [];
+            flushDataToResponse(
+              res,
+              openai_chat_default.map(
+                (value) => new Proxy(
+                  { value },
+                  {
+                    get(target) {
+                      recentFlushed.push(target.value);
+                      return target.value;
+                    }
+                  }
+                )
+              ),
+              "[DONE]"
+            );
+            break;
+          default:
+            throw new Error(`Unknown service: ${service}`);
+        }
+        break;
+      default:
+        throw new Error(`Unknown type: ${type}`);
+    }
+  });
+  server.listen(3030);
+  return {
+    port: 3030,
+    api: "http://localhost:3030",
+    teardown: () => {
+      server.close();
+    },
+    getRecentFlushed: () => recentFlushed
+  };
+};
+
+// tests/utils/mock-client.ts
+var createClient = (response) => {
+  return {
+    readAll() {
+      return __async(this, null, function* () {
+        if (!response.body) {
+          throw new Error("Response body is not readable");
+        }
+        let chunks = [];
+        const reader = response.body.getReader();
+        while (true) {
+          const { done, value } = yield reader.read();
+          if (done) {
+            break;
+          }
+          chunks.push(new TextDecoder().decode(value));
+        }
+        return chunks;
+      });
+    },
+    readAndAbort(controller) {
+      return __async(this, null, function* () {
+        if (!response.body) {
+          throw new Error("Response body is not readable");
+        }
+        let chunks = [];
+        const reader = response.body.getReader();
+        const { done, value } = yield reader.read();
+        if (!done) {
+          chunks.push(new TextDecoder().decode(value));
+        }
+        controller.abort();
+        reader.cancel();
+        return chunks;
+      });
+    }
+  };
+};
+
+// streams/index.test.ts
+describe("AIStream", () => {
+  let server;
+  beforeAll(() => {
+    server = setup();
+  });
+  afterAll(() => {
+    server.teardown();
+  });
+  describe("OpenAIStream", () => {
+    if (typeof Response === "undefined") {
+      it("should skip this test on Node 16 because it doesn't support `Response`", () => {
+      });
+    } else {
+      const { OpenAIStream: OpenAIStream2, StreamingTextResponse: StreamingTextResponse2 } = (init_streams(), __toCommonJS(streams_exports));
+      it("should be able to parse SSE and receive the streamed response", () => __async(exports, null, function* () {
+        const stream = OpenAIStream2(
+          yield fetch(server.api, {
+            headers: {
+              "x-mock-service": "openai",
+              "x-mock-type": "chat"
+            }
+          })
+        );
+        const response = new StreamingTextResponse2(stream);
+        const client = createClient(response);
+        const chunks = yield client.readAll();
+        expect(JSON.stringify(chunks)).toMatchInlineSnapshot(
+          `"["Hello",","," world","."]"`
+        );
+        expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
+          `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":","},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" world"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"."},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}]"`
+        );
+      }));
+      it("should handle backpressure on the server", () => __async(exports, null, function* () {
+        const controller = new AbortController();
+        const stream = OpenAIStream2(
+          yield fetch(server.api, {
+            headers: {
+              "x-mock-service": "openai",
+              "x-mock-type": "chat"
+            },
+            signal: controller.signal
+          })
+        );
+        const response = new StreamingTextResponse2(stream);
+        const client = createClient(response);
+        const chunks = yield client.readAndAbort(controller);
+        expect(JSON.stringify(chunks)).toMatchInlineSnapshot(`"["Hello"]"`);
+        expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
+          `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]}]"`
+        );
+      }));
+    }
   });
 });
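Taken together, the new test suite exercises the full 2.1.0 streaming pipeline: AIStream parses a provider's SSE body through createEventStreamTransformer, createCallbacksTransformer re-encodes tokens while firing the optional onStart/onToken/onCompletion callbacks, and StreamingTextResponse wraps the resulting stream in a 200 text/plain; charset=utf-8 response. A minimal sketch of the same flow in an application route handler, built only from the exports shown above; the endpoint URL, model, and request payload are illustrative assumptions, not part of this diff:

// Hypothetical route handler. OpenAIStream and StreamingTextResponse are the
// real exports exercised by the tests above; everything else is illustrative.
import { OpenAIStream, StreamingTextResponse } from 'ai'

export async function POST(req: Request) {
  const { prompt } = await req.json()

  // Any fetch Response carrying an OpenAI-style SSE body works here,
  // since AIStream only reads res.ok and res.body.
  const upstream = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`
    },
    body: JSON.stringify({
      model: 'gpt-3.5-turbo',
      stream: true,
      messages: [{ role: 'user', content: prompt }]
    })
  })

  // Tokens pass through createEventStreamTransformer, then
  // createCallbacksTransformer; the callbacks are optional, as in the tests.
  const stream = OpenAIStream(upstream, {
    onCompletion: async (completion) => {
      console.log('completion:', completion) // full concatenated response
    }
  })

  // Streams the tokens to the client as a 200 text/plain response.
  return new StreamingTextResponse(stream)
}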