ai 2.1.0 → 2.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68):
  1. package/dist/index.d.ts +48 -8
  2. package/dist/index.mjs +234 -26
  3. package/package.json +4 -4
  4. package/react/dist/index.d.ts +206 -3
  5. package/react/dist/index.mjs +384 -7
  6. package/svelte/dist/index.d.ts +194 -4
  7. package/svelte/dist/index.mjs +779 -7
  8. package/vue/dist/index.d.ts +194 -4
  9. package/vue/dist/index.mjs +345 -7
  10. package/dist/ai-stream.d.ts +0 -18
  11. package/dist/ai-stream.js +0 -132
  12. package/dist/ai-stream.mjs +0 -15
  13. package/dist/anthropic-stream.d.ts +0 -5
  14. package/dist/anthropic-stream.js +0 -133
  15. package/dist/anthropic-stream.mjs +0 -10
  16. package/dist/chunk-2JQWCLY2.mjs +0 -70
  17. package/dist/chunk-7KLTYB74.mjs +0 -70
  18. package/dist/chunk-BJMBMGA3.mjs +0 -34
  19. package/dist/chunk-KKQRUR3E.mjs +0 -51
  20. package/dist/chunk-RBP6ONSV.mjs +0 -45
  21. package/dist/chunk-TWW2ODJW.mjs +0 -32
  22. package/dist/chunk-U2OQ6HW6.mjs +0 -41
  23. package/dist/chunk-UJV6VDVU.mjs +0 -97
  24. package/dist/huggingface-stream.d.ts +0 -5
  25. package/dist/huggingface-stream.js +0 -121
  26. package/dist/huggingface-stream.mjs +0 -10
  27. package/dist/index.test.d.ts +0 -2
  28. package/dist/index.test.js +0 -568
  29. package/dist/index.test.mjs +0 -286
  30. package/dist/langchain-stream.d.ts +0 -12
  31. package/dist/langchain-stream.js +0 -102
  32. package/dist/langchain-stream.mjs +0 -10
  33. package/dist/openai-stream.d.ts +0 -5
  34. package/dist/openai-stream.js +0 -144
  35. package/dist/openai-stream.mjs +0 -10
  36. package/dist/streaming-text-response.d.ts +0 -17
  37. package/dist/streaming-text-response.js +0 -75
  38. package/dist/streaming-text-response.mjs +0 -11
  39. package/react/dist/chunk-5PP6W52J.mjs +0 -202
  40. package/react/dist/chunk-6EH3SWMP.mjs +0 -55
  41. package/react/dist/chunk-PW6HSU2N.mjs +0 -154
  42. package/react/dist/types-f862f74a.d.ts +0 -123
  43. package/react/dist/use-chat.d.ts +0 -42
  44. package/react/dist/use-chat.js +0 -276
  45. package/react/dist/use-chat.mjs +0 -8
  46. package/react/dist/use-completion.d.ts +0 -47
  47. package/react/dist/use-completion.js +0 -229
  48. package/react/dist/use-completion.mjs +0 -8
  49. package/svelte/dist/chunk-6USBQIV6.mjs +0 -177
  50. package/svelte/dist/chunk-BQ64GHZ3.mjs +0 -136
  51. package/svelte/dist/chunk-CENOSGDG.mjs +0 -493
  52. package/svelte/dist/types-f862f74a.d.ts +0 -123
  53. package/svelte/dist/use-chat.d.ts +0 -39
  54. package/svelte/dist/use-chat.js +0 -680
  55. package/svelte/dist/use-chat.mjs +0 -7
  56. package/svelte/dist/use-completion.d.ts +0 -38
  57. package/svelte/dist/use-completion.js +0 -640
  58. package/svelte/dist/use-completion.mjs +0 -7
  59. package/vue/dist/chunk-FT26CHLO.mjs +0 -137
  60. package/vue/dist/chunk-OYI6GFBM.mjs +0 -178
  61. package/vue/dist/chunk-WXH4YPZV.mjs +0 -54
  62. package/vue/dist/types-f862f74a.d.ts +0 -123
  63. package/vue/dist/use-chat.d.ts +0 -39
  64. package/vue/dist/use-chat.js +0 -252
  65. package/vue/dist/use-chat.mjs +0 -7
  66. package/vue/dist/use-completion.d.ts +0 -38
  67. package/vue/dist/use-completion.js +0 -212
  68. package/vue/dist/use-completion.mjs +0 -7
@@ -1,121 +0,0 @@
1
- "use strict";
2
- var __defProp = Object.defineProperty;
3
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
- var __getOwnPropNames = Object.getOwnPropertyNames;
5
- var __hasOwnProp = Object.prototype.hasOwnProperty;
6
- var __export = (target, all) => {
7
- for (var name in all)
8
- __defProp(target, name, { get: all[name], enumerable: true });
9
- };
10
- var __copyProps = (to, from, except, desc) => {
11
- if (from && typeof from === "object" || typeof from === "function") {
12
- for (let key of __getOwnPropNames(from))
13
- if (!__hasOwnProp.call(to, key) && key !== except)
14
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
- }
16
- return to;
17
- };
18
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
- var __async = (__this, __arguments, generator) => {
20
- return new Promise((resolve, reject) => {
21
- var fulfilled = (value) => {
22
- try {
23
- step(generator.next(value));
24
- } catch (e) {
25
- reject(e);
26
- }
27
- };
28
- var rejected = (value) => {
29
- try {
30
- step(generator.throw(value));
31
- } catch (e) {
32
- reject(e);
33
- }
34
- };
35
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
36
- step((generator = generator.apply(__this, __arguments)).next());
37
- });
38
- };
39
-
40
- // streams/huggingface-stream.ts
41
- var huggingface_stream_exports = {};
42
- __export(huggingface_stream_exports, {
43
- HuggingFaceStream: () => HuggingFaceStream
44
- });
45
- module.exports = __toCommonJS(huggingface_stream_exports);
46
-
47
- // streams/ai-stream.ts
48
- var import_eventsource_parser = require("eventsource-parser");
49
- function createCallbacksTransformer(callbacks) {
50
- const encoder = new TextEncoder();
51
- let fullResponse = "";
52
- const { onStart, onToken, onCompletion } = callbacks || {};
53
- return new TransformStream({
54
- start() {
55
- return __async(this, null, function* () {
56
- if (onStart)
57
- yield onStart();
58
- });
59
- },
60
- transform(message, controller) {
61
- return __async(this, null, function* () {
62
- controller.enqueue(encoder.encode(message));
63
- if (onToken)
64
- yield onToken(message);
65
- if (onCompletion)
66
- fullResponse += message;
67
- });
68
- },
69
- flush() {
70
- return __async(this, null, function* () {
71
- yield onCompletion == null ? void 0 : onCompletion(fullResponse);
72
- });
73
- }
74
- });
75
- }
76
- function trimStartOfStreamHelper() {
77
- let start = true;
78
- return (text) => {
79
- if (start)
80
- text = text.trimStart();
81
- if (text)
82
- start = false;
83
- return text;
84
- };
85
- }
86
-
87
- // streams/huggingface-stream.ts
88
- function createParser2(res) {
89
- const trimStartOfStream = trimStartOfStreamHelper();
90
- return new ReadableStream({
91
- pull(controller) {
92
- return __async(this, null, function* () {
93
- var _a2, _b;
94
- const { value, done } = yield res.next();
95
- if (done) {
96
- controller.close();
97
- return;
98
- }
99
- const text = trimStartOfStream((_b = (_a2 = value.token) == null ? void 0 : _a2.text) != null ? _b : "");
100
- if (!text)
101
- return;
102
- if (value.generated_text != null && value.generated_text.length > 0) {
103
- controller.close();
104
- return;
105
- }
106
- if (text === "</s>" || text === "<|endoftext|>") {
107
- controller.close();
108
- } else {
109
- controller.enqueue(text);
110
- }
111
- });
112
- }
113
- });
114
- }
115
- function HuggingFaceStream(res, callbacks) {
116
- return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks));
117
- }
118
- // Annotate the CommonJS export names for ESM import in node:
119
- 0 && (module.exports = {
120
- HuggingFaceStream
121
- });
@@ -1,10 +0,0 @@
1
- import {
2
- HuggingFaceStream,
3
- init_huggingface_stream
4
- } from "./chunk-KKQRUR3E.mjs";
5
- import "./chunk-UJV6VDVU.mjs";
6
- import "./chunk-2JQWCLY2.mjs";
7
- init_huggingface_stream();
8
- export {
9
- HuggingFaceStream
10
- };
@@ -1,2 +0,0 @@
1
-
2
- export { }
@@ -1,568 +0,0 @@
1
- "use strict";
2
- var __defProp = Object.defineProperty;
3
- var __defProps = Object.defineProperties;
4
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
- var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
6
- var __getOwnPropNames = Object.getOwnPropertyNames;
7
- var __getOwnPropSymbols = Object.getOwnPropertySymbols;
8
- var __hasOwnProp = Object.prototype.hasOwnProperty;
9
- var __propIsEnum = Object.prototype.propertyIsEnumerable;
10
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
11
- var __spreadValues = (a, b) => {
12
- for (var prop in b || (b = {}))
13
- if (__hasOwnProp.call(b, prop))
14
- __defNormalProp(a, prop, b[prop]);
15
- if (__getOwnPropSymbols)
16
- for (var prop of __getOwnPropSymbols(b)) {
17
- if (__propIsEnum.call(b, prop))
18
- __defNormalProp(a, prop, b[prop]);
19
- }
20
- return a;
21
- };
22
- var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
23
- var __esm = (fn, res) => function __init() {
24
- return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
25
- };
26
- var __export = (target, all) => {
27
- for (var name in all)
28
- __defProp(target, name, { get: all[name], enumerable: true });
29
- };
30
- var __copyProps = (to, from, except, desc) => {
31
- if (from && typeof from === "object" || typeof from === "function") {
32
- for (let key of __getOwnPropNames(from))
33
- if (!__hasOwnProp.call(to, key) && key !== except)
34
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
35
- }
36
- return to;
37
- };
38
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
39
- var __async = (__this, __arguments, generator) => {
40
- return new Promise((resolve, reject) => {
41
- var fulfilled = (value) => {
42
- try {
43
- step(generator.next(value));
44
- } catch (e) {
45
- reject(e);
46
- }
47
- };
48
- var rejected = (value) => {
49
- try {
50
- step(generator.throw(value));
51
- } catch (e) {
52
- reject(e);
53
- }
54
- };
55
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
56
- step((generator = generator.apply(__this, __arguments)).next());
57
- });
58
- };
59
-
60
- // streams/ai-stream.ts
61
- function createEventStreamTransformer(customParser) {
62
- const decoder = new TextDecoder();
63
- let parser;
64
- return new TransformStream({
65
- start(controller) {
66
- return __async(this, null, function* () {
67
- function onParse(event) {
68
- if (event.type === "event") {
69
- const data = event.data;
70
- if (data === "[DONE]") {
71
- controller.terminate();
72
- return;
73
- }
74
- const message = customParser(data);
75
- if (message)
76
- controller.enqueue(message);
77
- }
78
- }
79
- parser = (0, import_eventsource_parser.createParser)(onParse);
80
- });
81
- },
82
- transform(chunk) {
83
- parser.feed(decoder.decode(chunk));
84
- }
85
- });
86
- }
87
- function createCallbacksTransformer(callbacks) {
88
- const encoder = new TextEncoder();
89
- let fullResponse = "";
90
- const { onStart, onToken, onCompletion } = callbacks || {};
91
- return new TransformStream({
92
- start() {
93
- return __async(this, null, function* () {
94
- if (onStart)
95
- yield onStart();
96
- });
97
- },
98
- transform(message, controller) {
99
- return __async(this, null, function* () {
100
- controller.enqueue(encoder.encode(message));
101
- if (onToken)
102
- yield onToken(message);
103
- if (onCompletion)
104
- fullResponse += message;
105
- });
106
- },
107
- flush() {
108
- return __async(this, null, function* () {
109
- yield onCompletion == null ? void 0 : onCompletion(fullResponse);
110
- });
111
- }
112
- });
113
- }
114
- function trimStartOfStreamHelper() {
115
- let start = true;
116
- return (text) => {
117
- if (start)
118
- text = text.trimStart();
119
- if (text)
120
- start = false;
121
- return text;
122
- };
123
- }
124
- function AIStream(res, customParser, callbacks) {
125
- if (!res.ok) {
126
- throw new Error(
127
- `Failed to convert the response to stream. Received status code: ${res.status}.`
128
- );
129
- }
130
- const stream = res.body || new ReadableStream({
131
- start(controller) {
132
- controller.close();
133
- }
134
- });
135
- return stream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
136
- }
137
- var import_eventsource_parser;
138
- var init_ai_stream = __esm({
139
- "streams/ai-stream.ts"() {
140
- "use strict";
141
- import_eventsource_parser = require("eventsource-parser");
142
- }
143
- });
144
-
145
- // streams/openai-stream.ts
146
- function parseOpenAIStream() {
147
- const trimStartOfStream = trimStartOfStreamHelper();
148
- return (data) => {
149
- var _a, _b, _c, _d, _e;
150
- const json = JSON.parse(data);
151
- const text = trimStartOfStream(
152
- (_e = (_d = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) != null ? _d : (_c = json.choices[0]) == null ? void 0 : _c.text) != null ? _e : ""
153
- );
154
- return text;
155
- };
156
- }
157
- function OpenAIStream(res, cb) {
158
- return AIStream(res, parseOpenAIStream(), cb);
159
- }
160
- var init_openai_stream = __esm({
161
- "streams/openai-stream.ts"() {
162
- "use strict";
163
- init_ai_stream();
164
- }
165
- });
166
-
167
- // streams/streaming-text-response.ts
168
- function streamToResponse(res, response, init) {
169
- response.writeHead((init == null ? void 0 : init.status) || 200, __spreadValues({
170
- "Content-Type": "text/plain; charset=utf-8"
171
- }, init == null ? void 0 : init.headers));
172
- const reader = res.getReader();
173
- function read() {
174
- reader.read().then(({ done, value }) => {
175
- if (done) {
176
- response.end();
177
- return;
178
- }
179
- response.write(value);
180
- read();
181
- });
182
- }
183
- read();
184
- }
185
- var StreamingTextResponse;
186
- var init_streaming_text_response = __esm({
187
- "streams/streaming-text-response.ts"() {
188
- "use strict";
189
- StreamingTextResponse = class extends Response {
190
- constructor(res, init) {
191
- super(res, __spreadProps(__spreadValues({}, init), {
192
- status: 200,
193
- headers: __spreadValues({
194
- "Content-Type": "text/plain; charset=utf-8"
195
- }, init == null ? void 0 : init.headers)
196
- }));
197
- }
198
- };
199
- }
200
- });
201
-
202
- // streams/huggingface-stream.ts
203
- function createParser2(res) {
204
- const trimStartOfStream = trimStartOfStreamHelper();
205
- return new ReadableStream({
206
- pull(controller) {
207
- return __async(this, null, function* () {
208
- var _a2, _b;
209
- const { value, done } = yield res.next();
210
- if (done) {
211
- controller.close();
212
- return;
213
- }
214
- const text = trimStartOfStream((_b = (_a2 = value.token) == null ? void 0 : _a2.text) != null ? _b : "");
215
- if (!text)
216
- return;
217
- if (value.generated_text != null && value.generated_text.length > 0) {
218
- controller.close();
219
- return;
220
- }
221
- if (text === "</s>" || text === "<|endoftext|>") {
222
- controller.close();
223
- } else {
224
- controller.enqueue(text);
225
- }
226
- });
227
- }
228
- });
229
- }
230
- function HuggingFaceStream(res, callbacks) {
231
- return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks));
232
- }
233
- var init_huggingface_stream = __esm({
234
- "streams/huggingface-stream.ts"() {
235
- "use strict";
236
- init_ai_stream();
237
- }
238
- });
239
-
240
- // streams/anthropic-stream.ts
241
- function parseAnthropicStream() {
242
- let previous = "";
243
- return (data) => {
244
- const json = JSON.parse(data);
245
- const text = json.completion;
246
- const delta = text.slice(previous.length);
247
- previous = text;
248
- return delta;
249
- };
250
- }
251
- function AnthropicStream(res, cb) {
252
- return AIStream(res, parseAnthropicStream(), cb);
253
- }
254
- var init_anthropic_stream = __esm({
255
- "streams/anthropic-stream.ts"() {
256
- "use strict";
257
- init_ai_stream();
258
- }
259
- });
260
-
261
- // streams/langchain-stream.ts
262
- function LangChainStream(callbacks) {
263
- const stream = new TransformStream();
264
- const writer = stream.writable.getWriter();
265
- return {
266
- stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
267
- handlers: {
268
- handleLLMNewToken: (token) => __async(this, null, function* () {
269
- yield writer.ready;
270
- yield writer.write(token);
271
- }),
272
- handleChainEnd: () => __async(this, null, function* () {
273
- yield writer.ready;
274
- yield writer.close();
275
- }),
276
- handleLLMError: (e) => __async(this, null, function* () {
277
- yield writer.ready;
278
- yield writer.abort(e);
279
- })
280
- }
281
- };
282
- }
283
- var init_langchain_stream = __esm({
284
- "streams/langchain-stream.ts"() {
285
- "use strict";
286
- init_ai_stream();
287
- }
288
- });
289
-
290
- // shared/types.ts
291
- var init_types = __esm({
292
- "shared/types.ts"() {
293
- "use strict";
294
- }
295
- });
296
-
297
- // streams/index.ts
298
- var streams_exports = {};
299
- __export(streams_exports, {
300
- AIStream: () => AIStream,
301
- AnthropicStream: () => AnthropicStream,
302
- HuggingFaceStream: () => HuggingFaceStream,
303
- LangChainStream: () => LangChainStream,
304
- OpenAIStream: () => OpenAIStream,
305
- StreamingTextResponse: () => StreamingTextResponse,
306
- createCallbacksTransformer: () => createCallbacksTransformer,
307
- createEventStreamTransformer: () => createEventStreamTransformer,
308
- streamToResponse: () => streamToResponse,
309
- trimStartOfStreamHelper: () => trimStartOfStreamHelper
310
- });
311
- var init_streams = __esm({
312
- "streams/index.ts"() {
313
- "use strict";
314
- init_ai_stream();
315
- init_openai_stream();
316
- init_streaming_text_response();
317
- init_huggingface_stream();
318
- init_anthropic_stream();
319
- init_langchain_stream();
320
- init_types();
321
- }
322
- });
323
-
324
- // tests/utils/mock-service.ts
325
- var import_node_http = require("http");
326
-
327
- // tests/snapshots/openai-chat.ts
328
- var openai_chat_default = [
329
- {
330
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
331
- object: "chat.completion.chunk",
332
- created: 1686901302,
333
- model: "gpt-3.5-turbo-0301",
334
- choices: [
335
- {
336
- delta: { role: "assistant" },
337
- index: 0,
338
- finish_reason: null
339
- }
340
- ]
341
- },
342
- {
343
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
344
- object: "chat.completion.chunk",
345
- created: 1686901302,
346
- model: "gpt-3.5-turbo-0301",
347
- choices: [
348
- {
349
- delta: { content: "Hello" },
350
- index: 0,
351
- finish_reason: null
352
- }
353
- ]
354
- },
355
- {
356
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
357
- object: "chat.completion.chunk",
358
- created: 1686901302,
359
- model: "gpt-3.5-turbo-0301",
360
- choices: [{ delta: { content: "," }, index: 0, finish_reason: null }]
361
- },
362
- {
363
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
364
- object: "chat.completion.chunk",
365
- created: 1686901302,
366
- model: "gpt-3.5-turbo-0301",
367
- choices: [
368
- {
369
- delta: { content: " world" },
370
- index: 0,
371
- finish_reason: null
372
- }
373
- ]
374
- },
375
- {
376
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
377
- object: "chat.completion.chunk",
378
- created: 1686901302,
379
- model: "gpt-3.5-turbo-0301",
380
- choices: [{ delta: { content: "." }, index: 0, finish_reason: null }]
381
- },
382
- {
383
- id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
384
- object: "chat.completion.chunk",
385
- created: 1686901302,
386
- model: "gpt-3.5-turbo-0301",
387
- choices: [{ delta: {}, index: 0, finish_reason: "stop" }]
388
- }
389
- ];
390
-
391
- // tests/utils/mock-service.ts
392
- function flushDataToResponse(res, chunks, suffix) {
393
- return __async(this, null, function* () {
394
- let resolve = () => {
395
- };
396
- let waitForDrain = new Promise((res2) => resolve = res2);
397
- res.addListener("drain", () => {
398
- resolve();
399
- waitForDrain = new Promise((res2) => resolve = res2);
400
- });
401
- try {
402
- for (const item of chunks) {
403
- const data = `data: ${JSON.stringify(item.value)}
404
-
405
- `;
406
- const ok = res.write(data);
407
- if (!ok) {
408
- yield waitForDrain;
409
- }
410
- yield new Promise((r) => setTimeout(r, 100));
411
- }
412
- if (suffix) {
413
- const data = `data: ${suffix}
414
-
415
- `;
416
- res.write(data);
417
- }
418
- } catch (e) {
419
- }
420
- res.end();
421
- });
422
- }
423
- var setup = () => {
424
- let recentFlushed = [];
425
- const server = (0, import_node_http.createServer)((req, res) => {
426
- const service = req.headers["x-mock-service"] || "openai";
427
- const type = req.headers["x-mock-type"] || "chat";
428
- switch (type) {
429
- case "chat":
430
- switch (service) {
431
- case "openai":
432
- res.writeHead(200, {
433
- "Content-Type": "text/event-stream",
434
- "Cache-Control": "no-cache",
435
- Connection: "keep-alive"
436
- });
437
- res.flushHeaders();
438
- recentFlushed = [];
439
- flushDataToResponse(
440
- res,
441
- openai_chat_default.map(
442
- (value) => new Proxy(
443
- { value },
444
- {
445
- get(target) {
446
- recentFlushed.push(target.value);
447
- return target.value;
448
- }
449
- }
450
- )
451
- ),
452
- "[DONE]"
453
- );
454
- break;
455
- default:
456
- throw new Error(`Unknown service: ${service}`);
457
- }
458
- break;
459
- default:
460
- throw new Error(`Unknown type: ${type}`);
461
- }
462
- });
463
- server.listen(3030);
464
- return {
465
- port: 3030,
466
- api: "http://localhost:3030",
467
- teardown: () => {
468
- server.close();
469
- },
470
- getRecentFlushed: () => recentFlushed
471
- };
472
- };
473
-
474
- // tests/utils/mock-client.ts
475
- var createClient = (response) => {
476
- return {
477
- readAll() {
478
- return __async(this, null, function* () {
479
- if (!response.body) {
480
- throw new Error("Response body is not readable");
481
- }
482
- let chunks = [];
483
- const reader = response.body.getReader();
484
- while (true) {
485
- const { done, value } = yield reader.read();
486
- if (done) {
487
- break;
488
- }
489
- chunks.push(new TextDecoder().decode(value));
490
- }
491
- return chunks;
492
- });
493
- },
494
- readAndAbort(controller) {
495
- return __async(this, null, function* () {
496
- if (!response.body) {
497
- throw new Error("Response body is not readable");
498
- }
499
- let chunks = [];
500
- const reader = response.body.getReader();
501
- const { done, value } = yield reader.read();
502
- if (!done) {
503
- chunks.push(new TextDecoder().decode(value));
504
- }
505
- controller.abort();
506
- reader.cancel();
507
- return chunks;
508
- });
509
- }
510
- };
511
- };
512
-
513
- // streams/index.test.ts
514
- describe("AIStream", () => {
515
- let server;
516
- beforeAll(() => {
517
- server = setup();
518
- });
519
- afterAll(() => {
520
- server.teardown();
521
- });
522
- describe("OpenAIStream", () => {
523
- if (typeof Response === "undefined") {
524
- it("should skip this test on Node 16 because it doesn't support `Response`", () => {
525
- });
526
- } else {
527
- const { OpenAIStream: OpenAIStream2, StreamingTextResponse: StreamingTextResponse2 } = (init_streams(), __toCommonJS(streams_exports));
528
- it("should be able to parse SSE and receive the streamed response", () => __async(exports, null, function* () {
529
- const stream = OpenAIStream2(
530
- yield fetch(server.api, {
531
- headers: {
532
- "x-mock-service": "openai",
533
- "x-mock-type": "chat"
534
- }
535
- })
536
- );
537
- const response = new StreamingTextResponse2(stream);
538
- const client = createClient(response);
539
- const chunks = yield client.readAll();
540
- expect(JSON.stringify(chunks)).toMatchInlineSnapshot(
541
- `"["Hello",","," world","."]"`
542
- );
543
- expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
544
- `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":","},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" world"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"."},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}]"`
545
- );
546
- }));
547
- it("should handle backpressure on the server", () => __async(exports, null, function* () {
548
- const controller = new AbortController();
549
- const stream = OpenAIStream2(
550
- yield fetch(server.api, {
551
- headers: {
552
- "x-mock-service": "openai",
553
- "x-mock-type": "chat"
554
- },
555
- signal: controller.signal
556
- })
557
- );
558
- const response = new StreamingTextResponse2(stream);
559
- const client = createClient(response);
560
- const chunks = yield client.readAndAbort(controller);
561
- expect(JSON.stringify(chunks)).toMatchInlineSnapshot(`"["Hello"]"`);
562
- expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
563
- `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]}]"`
564
- );
565
- }));
566
- }
567
- });
568
- });