ai 2.1.0 → 2.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/dist/index.d.ts +48 -8
  2. package/dist/index.mjs +234 -26
  3. package/package.json +4 -4
  4. package/react/dist/index.d.ts +206 -3
  5. package/react/dist/index.mjs +384 -7
  6. package/svelte/dist/index.d.ts +194 -4
  7. package/svelte/dist/index.mjs +779 -7
  8. package/vue/dist/index.d.ts +194 -4
  9. package/vue/dist/index.mjs +345 -7
  10. package/dist/ai-stream.d.ts +0 -18
  11. package/dist/ai-stream.js +0 -132
  12. package/dist/ai-stream.mjs +0 -15
  13. package/dist/anthropic-stream.d.ts +0 -5
  14. package/dist/anthropic-stream.js +0 -133
  15. package/dist/anthropic-stream.mjs +0 -10
  16. package/dist/chunk-2JQWCLY2.mjs +0 -70
  17. package/dist/chunk-7KLTYB74.mjs +0 -70
  18. package/dist/chunk-BJMBMGA3.mjs +0 -34
  19. package/dist/chunk-KKQRUR3E.mjs +0 -51
  20. package/dist/chunk-RBP6ONSV.mjs +0 -45
  21. package/dist/chunk-TWW2ODJW.mjs +0 -32
  22. package/dist/chunk-U2OQ6HW6.mjs +0 -41
  23. package/dist/chunk-UJV6VDVU.mjs +0 -97
  24. package/dist/huggingface-stream.d.ts +0 -5
  25. package/dist/huggingface-stream.js +0 -121
  26. package/dist/huggingface-stream.mjs +0 -10
  27. package/dist/index.test.d.ts +0 -2
  28. package/dist/index.test.js +0 -568
  29. package/dist/index.test.mjs +0 -286
  30. package/dist/langchain-stream.d.ts +0 -12
  31. package/dist/langchain-stream.js +0 -102
  32. package/dist/langchain-stream.mjs +0 -10
  33. package/dist/openai-stream.d.ts +0 -5
  34. package/dist/openai-stream.js +0 -144
  35. package/dist/openai-stream.mjs +0 -10
  36. package/dist/streaming-text-response.d.ts +0 -17
  37. package/dist/streaming-text-response.js +0 -75
  38. package/dist/streaming-text-response.mjs +0 -11
  39. package/react/dist/chunk-5PP6W52J.mjs +0 -202
  40. package/react/dist/chunk-6EH3SWMP.mjs +0 -55
  41. package/react/dist/chunk-PW6HSU2N.mjs +0 -154
  42. package/react/dist/types-f862f74a.d.ts +0 -123
  43. package/react/dist/use-chat.d.ts +0 -42
  44. package/react/dist/use-chat.js +0 -276
  45. package/react/dist/use-chat.mjs +0 -8
  46. package/react/dist/use-completion.d.ts +0 -47
  47. package/react/dist/use-completion.js +0 -229
  48. package/react/dist/use-completion.mjs +0 -8
  49. package/svelte/dist/chunk-6USBQIV6.mjs +0 -177
  50. package/svelte/dist/chunk-BQ64GHZ3.mjs +0 -136
  51. package/svelte/dist/chunk-CENOSGDG.mjs +0 -493
  52. package/svelte/dist/types-f862f74a.d.ts +0 -123
  53. package/svelte/dist/use-chat.d.ts +0 -39
  54. package/svelte/dist/use-chat.js +0 -680
  55. package/svelte/dist/use-chat.mjs +0 -7
  56. package/svelte/dist/use-completion.d.ts +0 -38
  57. package/svelte/dist/use-completion.js +0 -640
  58. package/svelte/dist/use-completion.mjs +0 -7
  59. package/vue/dist/chunk-FT26CHLO.mjs +0 -137
  60. package/vue/dist/chunk-OYI6GFBM.mjs +0 -178
  61. package/vue/dist/chunk-WXH4YPZV.mjs +0 -54
  62. package/vue/dist/types-f862f74a.d.ts +0 -123
  63. package/vue/dist/use-chat.d.ts +0 -39
  64. package/vue/dist/use-chat.js +0 -252
  65. package/vue/dist/use-chat.mjs +0 -7
  66. package/vue/dist/use-completion.d.ts +0 -38
  67. package/vue/dist/use-completion.js +0 -212
  68. package/vue/dist/use-completion.mjs +0 -7
package/dist/index.test.mjs
@@ -1,286 +0,0 @@
- import {
-   init_streams,
-   streams_exports
- } from "./chunk-7KLTYB74.mjs";
- import "./chunk-TWW2ODJW.mjs";
- import "./chunk-KKQRUR3E.mjs";
- import "./chunk-U2OQ6HW6.mjs";
- import "./chunk-BJMBMGA3.mjs";
- import "./chunk-UJV6VDVU.mjs";
- import "./chunk-RBP6ONSV.mjs";
- import {
-   __async,
-   __commonJS,
-   __esm,
-   __toCommonJS
- } from "./chunk-2JQWCLY2.mjs";
-
- // tests/snapshots/openai-chat.ts
- var openai_chat_default;
- var init_openai_chat = __esm({
-   "tests/snapshots/openai-chat.ts"() {
-     "use strict";
-     openai_chat_default = [
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [
-           {
-             delta: { role: "assistant" },
-             index: 0,
-             finish_reason: null
-           }
-         ]
-       },
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [
-           {
-             delta: { content: "Hello" },
-             index: 0,
-             finish_reason: null
-           }
-         ]
-       },
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [{ delta: { content: "," }, index: 0, finish_reason: null }]
-       },
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [
-           {
-             delta: { content: " world" },
-             index: 0,
-             finish_reason: null
-           }
-         ]
-       },
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [{ delta: { content: "." }, index: 0, finish_reason: null }]
-       },
-       {
-         id: "chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC",
-         object: "chat.completion.chunk",
-         created: 1686901302,
-         model: "gpt-3.5-turbo-0301",
-         choices: [{ delta: {}, index: 0, finish_reason: "stop" }]
-       }
-     ];
-   }
- });
-
- // tests/utils/mock-service.ts
- import { createServer } from "http";
- function flushDataToResponse(res, chunks, suffix) {
-   return __async(this, null, function* () {
-     let resolve = () => {
-     };
-     let waitForDrain = new Promise((res2) => resolve = res2);
-     res.addListener("drain", () => {
-       resolve();
-       waitForDrain = new Promise((res2) => resolve = res2);
-     });
-     try {
-       for (const item of chunks) {
-         const data = `data: ${JSON.stringify(item.value)}
-
- `;
-         const ok = res.write(data);
-         if (!ok) {
-           yield waitForDrain;
-         }
-         yield new Promise((r) => setTimeout(r, 100));
-       }
-       if (suffix) {
-         const data = `data: ${suffix}
-
- `;
-         res.write(data);
-       }
-     } catch (e) {
-     }
-     res.end();
-   });
- }
- var setup;
- var init_mock_service = __esm({
-   "tests/utils/mock-service.ts"() {
-     "use strict";
-     init_openai_chat();
-     setup = () => {
-       let recentFlushed = [];
-       const server = createServer((req, res) => {
-         const service = req.headers["x-mock-service"] || "openai";
-         const type = req.headers["x-mock-type"] || "chat";
-         switch (type) {
-           case "chat":
-             switch (service) {
-               case "openai":
-                 res.writeHead(200, {
-                   "Content-Type": "text/event-stream",
-                   "Cache-Control": "no-cache",
-                   Connection: "keep-alive"
-                 });
-                 res.flushHeaders();
-                 recentFlushed = [];
-                 flushDataToResponse(
-                   res,
-                   openai_chat_default.map(
-                     (value) => new Proxy(
-                       { value },
-                       {
-                         get(target) {
-                           recentFlushed.push(target.value);
-                           return target.value;
-                         }
-                       }
-                     )
-                   ),
-                   "[DONE]"
-                 );
-                 break;
-               default:
-                 throw new Error(`Unknown service: ${service}`);
-             }
-             break;
-           default:
-             throw new Error(`Unknown type: ${type}`);
-         }
-       });
-       server.listen(3030);
-       return {
-         port: 3030,
-         api: "http://localhost:3030",
-         teardown: () => {
-           server.close();
-         },
-         getRecentFlushed: () => recentFlushed
-       };
-     };
-   }
- });
-
- // tests/utils/mock-client.ts
- var createClient;
- var init_mock_client = __esm({
-   "tests/utils/mock-client.ts"() {
-     "use strict";
-     createClient = (response) => {
-       return {
-         readAll() {
-           return __async(this, null, function* () {
-             if (!response.body) {
-               throw new Error("Response body is not readable");
-             }
-             let chunks = [];
-             const reader = response.body.getReader();
-             while (true) {
-               const { done, value } = yield reader.read();
-               if (done) {
-                 break;
-               }
-               chunks.push(new TextDecoder().decode(value));
-             }
-             return chunks;
-           });
-         },
-         readAndAbort(controller) {
-           return __async(this, null, function* () {
-             if (!response.body) {
-               throw new Error("Response body is not readable");
-             }
-             let chunks = [];
-             const reader = response.body.getReader();
-             const { done, value } = yield reader.read();
-             if (!done) {
-               chunks.push(new TextDecoder().decode(value));
-             }
-             controller.abort();
-             reader.cancel();
-             return chunks;
-           });
-         }
-       };
-     };
-   }
- });
-
- // streams/index.test.ts
- var require_index_test = __commonJS({
-   "streams/index.test.ts"(exports) {
-     init_mock_service();
-     init_mock_client();
-     describe("AIStream", () => {
-       let server;
-       beforeAll(() => {
-         server = setup();
-       });
-       afterAll(() => {
-         server.teardown();
-       });
-       describe("OpenAIStream", () => {
-         if (typeof Response === "undefined") {
-           it("should skip this test on Node 16 because it doesn't support `Response`", () => {
-           });
-         } else {
-           const { OpenAIStream, StreamingTextResponse } = (init_streams(), __toCommonJS(streams_exports));
-           it("should be able to parse SSE and receive the streamed response", () => __async(exports, null, function* () {
-             const stream = OpenAIStream(
-               yield fetch(server.api, {
-                 headers: {
-                   "x-mock-service": "openai",
-                   "x-mock-type": "chat"
-                 }
-               })
-             );
-             const response = new StreamingTextResponse(stream);
-             const client = createClient(response);
-             const chunks = yield client.readAll();
-             expect(JSON.stringify(chunks)).toMatchInlineSnapshot(
-               `"["Hello",","," world","."]"`
-             );
-             expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
-               `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":","},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":" world"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"."},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}]"`
-             );
-           }));
-           it("should handle backpressure on the server", () => __async(exports, null, function* () {
-             const controller = new AbortController();
-             const stream = OpenAIStream(
-               yield fetch(server.api, {
-                 headers: {
-                   "x-mock-service": "openai",
-                   "x-mock-type": "chat"
-                 },
-                 signal: controller.signal
-               })
-             );
-             const response = new StreamingTextResponse(stream);
-             const client = createClient(response);
-             const chunks = yield client.readAndAbort(controller);
-             expect(JSON.stringify(chunks)).toMatchInlineSnapshot(`"["Hello"]"`);
-             expect(JSON.stringify(server.getRecentFlushed())).toMatchInlineSnapshot(
-               `"[{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]},{"id":"chatcmpl-7RyNSW2BXkOQQh7NlBc65j5kX8AjC","object":"chat.completion.chunk","created":1686901302,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Hello"},"index":0,"finish_reason":null}]}]"`
-             );
-           }));
-         }
-       });
-     });
-   }
- });
- export default require_index_test();
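For context on the test fixture deleted above: the mock service speaks OpenAI's server-sent-events wire format, writing each chunk as a `data: <json>` line followed by a blank line and terminating with `data: [DONE]`. A minimal sketch of that framing (the port and payload here are illustrative, not part of the package):

import { createServer } from "http";

// Emit one OpenAI-style SSE chunk, then the [DONE] sentinel.
const server = createServer((req, res) => {
  res.writeHead(200, { "Content-Type": "text/event-stream" });
  const chunk = { choices: [{ delta: { content: "Hello" }, index: 0, finish_reason: null }] };
  res.write(`data: ${JSON.stringify(chunk)}\n\n`);
  res.write("data: [DONE]\n\n");
  res.end();
});
server.listen(3030);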
package/dist/langchain-stream.d.ts
@@ -1,12 +0,0 @@
- import { AIStreamCallbacks } from './ai-stream.js';
-
- declare function LangChainStream(callbacks?: AIStreamCallbacks): {
-     stream: ReadableStream<Uint8Array>;
-     handlers: {
-         handleLLMNewToken: (token: string) => Promise<void>;
-         handleChainEnd: () => Promise<void>;
-         handleLLMError: (e: any) => Promise<void>;
-     };
- };
-
- export { LangChainStream };
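The declaration above is the integration point for LangChain's callback system: `handlers` plugs into a LangChain model and forwards tokens into `stream`. A minimal usage sketch, assuming the `langchain` package of the same era (import paths and the callbacks argument vary across langchain versions):

import { LangChainStream, StreamingTextResponse } from "ai";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { HumanChatMessage } from "langchain/schema";

export async function POST(req: Request) {
  const { prompt } = await req.json();
  const { stream, handlers } = LangChainStream();
  const llm = new ChatOpenAI({ streaming: true });
  // handlers supplies handleLLMNewToken/handleChainEnd/handleLLMError.
  llm.call([new HumanChatMessage(prompt)], {}, [handlers]).catch(console.error);
  return new StreamingTextResponse(stream);
}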
package/dist/langchain-stream.js
@@ -1,102 +0,0 @@
- "use strict";
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
-   if (from && typeof from === "object" || typeof from === "function") {
-     for (let key of __getOwnPropNames(from))
-       if (!__hasOwnProp.call(to, key) && key !== except)
-         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-   }
-   return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var __async = (__this, __arguments, generator) => {
-   return new Promise((resolve, reject) => {
-     var fulfilled = (value) => {
-       try {
-         step(generator.next(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var rejected = (value) => {
-       try {
-         step(generator.throw(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
-     step((generator = generator.apply(__this, __arguments)).next());
-   });
- };
-
- // streams/langchain-stream.ts
- var langchain_stream_exports = {};
- __export(langchain_stream_exports, {
-   LangChainStream: () => LangChainStream
- });
- module.exports = __toCommonJS(langchain_stream_exports);
-
- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createCallbacksTransformer(callbacks) {
-   const encoder = new TextEncoder();
-   let fullResponse = "";
-   const { onStart, onToken, onCompletion } = callbacks || {};
-   return new TransformStream({
-     start() {
-       return __async(this, null, function* () {
-         if (onStart)
-           yield onStart();
-       });
-     },
-     transform(message, controller) {
-       return __async(this, null, function* () {
-         controller.enqueue(encoder.encode(message));
-         if (onToken)
-           yield onToken(message);
-         if (onCompletion)
-           fullResponse += message;
-       });
-     },
-     flush() {
-       return __async(this, null, function* () {
-         yield onCompletion == null ? void 0 : onCompletion(fullResponse);
-       });
-     }
-   });
- }
-
- // streams/langchain-stream.ts
- function LangChainStream(callbacks) {
-   const stream = new TransformStream();
-   const writer = stream.writable.getWriter();
-   return {
-     stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
-     handlers: {
-       handleLLMNewToken: (token) => __async(this, null, function* () {
-         yield writer.ready;
-         yield writer.write(token);
-       }),
-       handleChainEnd: () => __async(this, null, function* () {
-         yield writer.ready;
-         yield writer.close();
-       }),
-       handleLLMError: (e) => __async(this, null, function* () {
-         yield writer.ready;
-         yield writer.abort(e);
-       })
-     }
-   };
- }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
-   LangChainStream
- });
package/dist/langchain-stream.mjs
@@ -1,10 +0,0 @@
- import {
-   LangChainStream,
-   init_langchain_stream
- } from "./chunk-U2OQ6HW6.mjs";
- import "./chunk-UJV6VDVU.mjs";
- import "./chunk-2JQWCLY2.mjs";
- init_langchain_stream();
- export {
-   LangChainStream
- };
package/dist/openai-stream.d.ts
@@ -1,5 +0,0 @@
- import { AIStreamCallbacks } from './ai-stream.js';
-
- declare function OpenAIStream(res: Response, cb?: AIStreamCallbacks): ReadableStream;
-
- export { OpenAIStream };
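`OpenAIStream` accepts any `Response` whose body is an OpenAI-style SSE stream, so it pairs naturally with a direct fetch to the completions endpoint. A minimal route sketch (the endpoint, model, and env var name are illustrative, not mandated by the package):

import { OpenAIStream, StreamingTextResponse } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify({ model: "gpt-3.5-turbo", stream: true, messages }),
  });
  // OpenAIStream parses the SSE body; StreamingTextResponse streams it out.
  return new StreamingTextResponse(OpenAIStream(res));
}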
package/dist/openai-stream.js
@@ -1,144 +0,0 @@
- "use strict";
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
-   if (from && typeof from === "object" || typeof from === "function") {
-     for (let key of __getOwnPropNames(from))
-       if (!__hasOwnProp.call(to, key) && key !== except)
-         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-   }
-   return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var __async = (__this, __arguments, generator) => {
-   return new Promise((resolve, reject) => {
-     var fulfilled = (value) => {
-       try {
-         step(generator.next(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var rejected = (value) => {
-       try {
-         step(generator.throw(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
-     step((generator = generator.apply(__this, __arguments)).next());
-   });
- };
-
- // streams/openai-stream.ts
- var openai_stream_exports = {};
- __export(openai_stream_exports, {
-   OpenAIStream: () => OpenAIStream
- });
- module.exports = __toCommonJS(openai_stream_exports);
-
- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createEventStreamTransformer(customParser) {
-   const decoder = new TextDecoder();
-   let parser;
-   return new TransformStream({
-     start(controller) {
-       return __async(this, null, function* () {
-         function onParse(event) {
-           if (event.type === "event") {
-             const data = event.data;
-             if (data === "[DONE]") {
-               controller.terminate();
-               return;
-             }
-             const message = customParser(data);
-             if (message)
-               controller.enqueue(message);
-           }
-         }
-         parser = (0, import_eventsource_parser.createParser)(onParse);
-       });
-     },
-     transform(chunk) {
-       parser.feed(decoder.decode(chunk));
-     }
-   });
- }
- function createCallbacksTransformer(callbacks) {
-   const encoder = new TextEncoder();
-   let fullResponse = "";
-   const { onStart, onToken, onCompletion } = callbacks || {};
-   return new TransformStream({
-     start() {
-       return __async(this, null, function* () {
-         if (onStart)
-           yield onStart();
-       });
-     },
-     transform(message, controller) {
-       return __async(this, null, function* () {
-         controller.enqueue(encoder.encode(message));
-         if (onToken)
-           yield onToken(message);
-         if (onCompletion)
-           fullResponse += message;
-       });
-     },
-     flush() {
-       return __async(this, null, function* () {
-         yield onCompletion == null ? void 0 : onCompletion(fullResponse);
-       });
-     }
-   });
- }
- function trimStartOfStreamHelper() {
-   let start = true;
-   return (text) => {
-     if (start)
-       text = text.trimStart();
-     if (text)
-       start = false;
-     return text;
-   };
- }
- function AIStream(res, customParser, callbacks) {
-   if (!res.ok) {
-     throw new Error(
-       `Failed to convert the response to stream. Received status code: ${res.status}.`
-     );
-   }
-   const stream = res.body || new ReadableStream({
-     start(controller) {
-       controller.close();
-     }
-   });
-   return stream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
-
- // streams/openai-stream.ts
- function parseOpenAIStream() {
-   const trimStartOfStream = trimStartOfStreamHelper();
-   return (data) => {
-     var _a, _b, _c, _d, _e;
-     const json = JSON.parse(data);
-     const text = trimStartOfStream(
-       (_e = (_d = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) != null ? _d : (_c = json.choices[0]) == null ? void 0 : _c.text) != null ? _e : ""
-     );
-     return text;
-   };
- }
- function OpenAIStream(res, cb) {
-   return AIStream(res, parseOpenAIStream(), cb);
- }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
-   OpenAIStream
- });
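As the deleted implementation shows, the optional second argument is threaded through `createCallbacksTransformer`: `onStart` fires once, `onToken` fires per decoded message, and `fullResponse` accumulates only when `onCompletion` is provided. A sketch of those callbacks in use:

import { OpenAIStream } from "ai";

function withLogging(res: Response) {
  return OpenAIStream(res, {
    onStart: async () => console.log("stream started"),
    onToken: async (token) => console.log("token:", token),
    // Only accumulated (and flushed here) because onCompletion is set.
    onCompletion: async (full) => console.log("completed:", full),
  });
}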
package/dist/openai-stream.mjs
@@ -1,10 +0,0 @@
- import {
-   OpenAIStream,
-   init_openai_stream
- } from "./chunk-BJMBMGA3.mjs";
- import "./chunk-UJV6VDVU.mjs";
- import "./chunk-2JQWCLY2.mjs";
- init_openai_stream();
- export {
-   OpenAIStream
- };
package/dist/streaming-text-response.d.ts
@@ -1,17 +0,0 @@
- import { ServerResponse } from 'node:http';
-
- /**
-  * A utility class for streaming text responses.
-  */
- declare class StreamingTextResponse extends Response {
-     constructor(res: ReadableStream, init?: ResponseInit);
- }
- /**
-  * A utility function to stream a ReadableStream to a Node.js response-like object.
-  */
- declare function streamToResponse(res: ReadableStream, response: ServerResponse, init?: {
-     headers?: Record<string, string>;
-     status?: number;
- }): void;
-
- export { StreamingTextResponse, streamToResponse };
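Per the declaration above, `streamToResponse` adapts a web `ReadableStream` to Node's `ServerResponse`, defaulting to status 200 and `text/plain`. A runnable sketch on Node 18+ (the stub stream stands in for a real AI stream; the port is arbitrary):

import { createServer } from "node:http";
import { streamToResponse } from "ai";

const server = createServer((req, res) => {
  // Stub stream standing in for e.g. an OpenAIStream result.
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(new TextEncoder().encode("Hello, world."));
      controller.close();
    },
  });
  streamToResponse(stream, res, { status: 200 });
});
server.listen(8080);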
package/dist/streaming-text-response.js
@@ -1,75 +0,0 @@
- "use strict";
- var __defProp = Object.defineProperty;
- var __defProps = Object.defineProperties;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getOwnPropSymbols = Object.getOwnPropertySymbols;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __propIsEnum = Object.prototype.propertyIsEnumerable;
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __spreadValues = (a, b) => {
-   for (var prop in b || (b = {}))
-     if (__hasOwnProp.call(b, prop))
-       __defNormalProp(a, prop, b[prop]);
-   if (__getOwnPropSymbols)
-     for (var prop of __getOwnPropSymbols(b)) {
-       if (__propIsEnum.call(b, prop))
-         __defNormalProp(a, prop, b[prop]);
-     }
-   return a;
- };
- var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
-   if (from && typeof from === "object" || typeof from === "function") {
-     for (let key of __getOwnPropNames(from))
-       if (!__hasOwnProp.call(to, key) && key !== except)
-         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-   }
-   return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
- // streams/streaming-text-response.ts
- var streaming_text_response_exports = {};
- __export(streaming_text_response_exports, {
-   StreamingTextResponse: () => StreamingTextResponse,
-   streamToResponse: () => streamToResponse
- });
- module.exports = __toCommonJS(streaming_text_response_exports);
- var StreamingTextResponse = class extends Response {
-   constructor(res, init) {
-     super(res, __spreadProps(__spreadValues({}, init), {
-       status: 200,
-       headers: __spreadValues({
-         "Content-Type": "text/plain; charset=utf-8"
-       }, init == null ? void 0 : init.headers)
-     }));
-   }
- };
- function streamToResponse(res, response, init) {
-   response.writeHead((init == null ? void 0 : init.status) || 200, __spreadValues({
-     "Content-Type": "text/plain; charset=utf-8"
-   }, init == null ? void 0 : init.headers));
-   const reader = res.getReader();
-   function read() {
-     reader.read().then(({ done, value }) => {
-       if (done) {
-         response.end();
-         return;
-       }
-       response.write(value);
-       read();
-     });
-   }
-   read();
- }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
-   StreamingTextResponse,
-   streamToResponse
- });
package/dist/streaming-text-response.mjs
@@ -1,11 +0,0 @@
- import {
-   StreamingTextResponse,
-   init_streaming_text_response,
-   streamToResponse
- } from "./chunk-RBP6ONSV.mjs";
- import "./chunk-2JQWCLY2.mjs";
- init_streaming_text_response();
- export {
-   StreamingTextResponse,
-   streamToResponse
- };