ai 2.1.26 → 2.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,25 +1,8 @@
  "use strict";
  var __defProp = Object.defineProperty;
- var __defProps = Object.defineProperties;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
  var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getOwnPropSymbols = Object.getOwnPropertySymbols;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __propIsEnum = Object.prototype.propertyIsEnumerable;
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __spreadValues = (a, b) => {
-   for (var prop in b || (b = {}))
-     if (__hasOwnProp.call(b, prop))
-       __defNormalProp(a, prop, b[prop]);
-   if (__getOwnPropSymbols)
-     for (var prop of __getOwnPropSymbols(b)) {
-       if (__propIsEnum.call(b, prop))
-         __defNormalProp(a, prop, b[prop]);
-     }
-   return a;
- };
- var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
  var __export = (target, all) => {
    for (var name in all)
      __defProp(target, name, { get: all[name], enumerable: true });
@@ -33,26 +16,6 @@ var __copyProps = (to, from, except, desc) => {
    return to;
  };
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var __async = (__this, __arguments, generator) => {
-   return new Promise((resolve, reject) => {
-     var fulfilled = (value) => {
-       try {
-         step(generator.next(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var rejected = (value) => {
-       try {
-         step(generator.throw(value));
-       } catch (e) {
-         reject(e);
-       }
-     };
-     var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
-     step((generator = generator.apply(__this, __arguments)).next());
-   });
- };

  // streams/index.ts
  var streams_exports = {};
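The `__spread*` and `__async` definitions removed above are esbuild's down-leveling helpers for object spread and generator-driven async functions; the added lines throughout the rest of this diff use native `...` spread and `async`/`await` instead. A minimal sketch of what the removed `__async` pattern amounts to, written from scratch for illustration (nothing below is taken from the package itself):

    // Drive a generator whose yielded values are awaited -- the essence of the
    // removed __async helper. Native async/await does exactly this.
    function runAsync<T>(gen: Generator<unknown, T, unknown>): Promise<T> {
      return new Promise((resolve, reject) => {
        const step = (input?: unknown): void => {
          let result: IteratorResult<unknown, T>;
          try {
            result = gen.next(input);
          } catch (err) {
            reject(err);
            return;
          }
          if (result.done) {
            resolve(result.value);
          } else {
            Promise.resolve(result.value).then(step, reject);
          }
        };
        step();
      });
    }

    // Old-bundle style: generator driven by a helper.
    const downLeveled = () =>
      runAsync(function* () {
        const greeting = yield Promise.resolve("hello");
        return String(greeting) + " world";
      }());

    // New-bundle style: the same logic with native syntax.
    const native = async () => {
      const greeting = await Promise.resolve("hello");
      return greeting + " world";
    };

Both calls resolve to the same string; the new bundle simply ships the second form directly.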
@@ -68,6 +31,7 @@ __export(streams_exports, {
    createChunkDecoder: () => createChunkDecoder,
    createEventStreamTransformer: () => createEventStreamTransformer,
    nanoid: () => nanoid,
+   readableFromAsyncIterable: () => readableFromAsyncIterable,
    streamToResponse: () => streamToResponse,
    trimStartOfStreamHelper: () => trimStartOfStreamHelper
  });
@@ -79,22 +43,20 @@ function createEventStreamTransformer(customParser) {
    const textDecoder = new TextDecoder();
    let eventSourceParser;
    return new TransformStream({
-     start(controller) {
-       return __async(this, null, function* () {
-         eventSourceParser = (0, import_eventsource_parser.createParser)(
-           (event) => {
-             if ("data" in event && event.type === "event" && event.data === "[DONE]") {
-               controller.terminate();
-               return;
-             }
-             if ("data" in event) {
-               const parsedMessage = customParser(event.data);
-               if (parsedMessage)
-                 controller.enqueue(parsedMessage);
-             }
+     async start(controller) {
+       eventSourceParser = (0, import_eventsource_parser.createParser)(
+         (event) => {
+           if ("data" in event && event.type === "event" && event.data === "[DONE]") {
+             controller.terminate();
+             return;
            }
-         );
-       });
+           if ("data" in event) {
+             const parsedMessage = customParser(event.data);
+             if (parsedMessage)
+               controller.enqueue(parsedMessage);
+           }
+         }
+       );
      },
      transform(chunk) {
        eventSourceParser.feed(textDecoder.decode(chunk));
@@ -106,26 +68,20 @@ function createCallbacksTransformer(callbacks) {
    let aggregatedResponse = "";
    const { onStart, onToken, onCompletion } = callbacks || {};
    return new TransformStream({
-     start() {
-       return __async(this, null, function* () {
-         if (onStart)
-           yield onStart();
-       });
+     async start() {
+       if (onStart)
+         await onStart();
      },
-     transform(message, controller) {
-       return __async(this, null, function* () {
-         controller.enqueue(textEncoder.encode(message));
-         if (onToken)
-           yield onToken(message);
-         if (onCompletion)
-           aggregatedResponse += message;
-       });
+     async transform(message, controller) {
+       controller.enqueue(textEncoder.encode(message));
+       if (onToken)
+         await onToken(message);
+       if (onCompletion)
+         aggregatedResponse += message;
      },
-     flush() {
-       return __async(this, null, function* () {
-         if (onCompletion)
-           yield onCompletion(aggregatedResponse);
-       });
+     async flush() {
+       if (onCompletion)
+         await onCompletion(aggregatedResponse);
      }
    });
  }
@@ -143,17 +99,14 @@ function trimStartOfStreamHelper() {
  function AIStream(response, customParser, callbacks) {
    if (!response.ok) {
      if (response.body) {
-       let _a;
        const reader = response.body.getReader();
        return new ReadableStream({
-         start(controller) {
-           return __async(this, null, function* () {
-             const { done, value } = yield reader.read();
-             if (!done) {
-               const errorText = new TextDecoder().decode(value);
-               controller.error(new Error(`Response error: ${errorText}`));
-             }
-           });
+         async start(controller) {
+           const { done, value } = await reader.read();
+           if (!done) {
+             const errorText = new TextDecoder().decode(value);
+             controller.error(new Error(`Response error: ${errorText}`));
+           }
          }
        });
      } else {
@@ -174,14 +127,43 @@ function createEmptyReadableStream() {
      }
    });
  }
+ function readableFromAsyncIterable(iterable) {
+   let it = iterable[Symbol.asyncIterator]();
+   return new ReadableStream({
+     async pull(controller) {
+       const { done, value } = await it.next();
+       if (done)
+         controller.close();
+       else
+         controller.enqueue(value);
+     },
+     async cancel(reason) {
+       var _a;
+       await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
+     }
+   });
+ }

  // streams/openai-stream.ts
  function parseOpenAIStream() {
+   const extract = chunkToText();
+   return (data) => {
+     return extract(JSON.parse(data));
+   };
+ }
+ async function* streamable(stream) {
+   const extract = chunkToText();
+   for await (const chunk of stream) {
+     const text = extract(chunk);
+     if (text)
+       yield text;
+   }
+ }
+ function chunkToText() {
    const trimStartOfStream = trimStartOfStreamHelper();
    let isFunctionStreamingIn;
-   return (data) => {
+   return (json) => {
      var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
-     const json = JSON.parse(data);
      if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
        isFunctionStreamingIn = true;
        return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
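The new `readableFromAsyncIterable` helper added above wraps any `AsyncIterable` in a web `ReadableStream`: each `pull` awaits `it.next()`, a `done` result closes the stream, and `cancel` is forwarded to the iterator's `return` method. A minimal usage sketch, assuming the helper is imported from the package root as exported in this version; the generator below is invented for illustration:

    import { readableFromAsyncIterable } from 'ai';

    // Hypothetical async generator producing text chunks (illustration only).
    async function* chunks() {
      yield 'Hello, ';
      yield 'world!';
    }

    async function main() {
      // Wrap the async iterable in a ReadableStream and drain it.
      const stream = readableFromAsyncIterable(chunks());
      const reader = stream.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        console.log(value);
      }
    }

    main();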
@@ -189,7 +171,7 @@ function parseOpenAIStream() {
        const argumentChunk = json.choices[0].delta.function_call.arguments;
        let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
        return `${escapedPartialJson}`;
-     } else if ((((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call" || ((_j = json.choices[0]) == null ? void 0 : _j.finish_reason) === "stop") && isFunctionStreamingIn) {
+     } else if (isFunctionStreamingIn && (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call" || ((_j = json.choices[0]) == null ? void 0 : _j.finish_reason) === "stop")) {
        isFunctionStreamingIn = false;
        return '"}}';
      }
@@ -202,7 +184,14 @@ function parseOpenAIStream() {
  var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
  function OpenAIStream(res, callbacks) {
    const cb = callbacks;
-   const stream = AIStream(res, parseOpenAIStream(), cb);
+   let stream;
+   if (Symbol.asyncIterator in res) {
+     stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
+       createCallbacksTransformer(cb)
+     );
+   } else {
+     stream = AIStream(res, parseOpenAIStream(), cb);
+   }
    if (cb && cb.experimental_onFunctionCall) {
      const functionCallTransformer = createFunctionCallTransformer(cb);
      return stream.pipeThrough(functionCallTransformer);
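`OpenAIStream` now branches on `Symbol.asyncIterator in res`: an async-iterable argument is converted through `streamable` and `readableFromAsyncIterable` and piped through the callbacks transformer, while a fetch `Response` still goes through `AIStream` with the SSE parser. A hedged sketch of the new call shape; the OpenAI v4-style client and the Next.js-style route handler are assumptions about the intended consumer and are not part of this diff:

    import OpenAI from 'openai';
    import { OpenAIStream, StreamingTextResponse } from 'ai';

    const openai = new OpenAI();

    export async function POST() {
      // With stream: true the client returns an async-iterable stream of chunks.
      const response = await openai.chat.completions.create({
        model: 'gpt-3.5-turbo',
        stream: true,
        messages: [{ role: 'user', content: 'Say hello' }],
      });
      // Async-iterable path added in this version; a fetch Response with an
      // SSE body still works exactly as before.
      const stream = OpenAIStream(response);
      return new StreamingTextResponse(stream);
    }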
@@ -217,78 +206,76 @@ function createFunctionCallTransformer(callbacks) {
    let isFunctionStreamingIn = false;
    let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
    return new TransformStream({
-     transform(chunk, controller) {
-       return __async(this, null, function* () {
-         const message = new TextDecoder().decode(chunk);
-         const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
-         if (shouldHandleAsFunction) {
-           isFunctionStreamingIn = true;
-           aggregatedResponse += message;
-           isFirstChunk = false;
+     async transform(chunk, controller) {
+       const message = new TextDecoder().decode(chunk);
+       const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
+       if (shouldHandleAsFunction) {
+         isFunctionStreamingIn = true;
+         aggregatedResponse += message;
+         isFirstChunk = false;
+         return;
+       }
+       if (!isFunctionStreamingIn) {
+         controller.enqueue(chunk);
+         return;
+       } else {
+         aggregatedResponse += message;
+       }
+     },
+     async flush(controller) {
+       const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
+       if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
+         isFunctionStreamingIn = false;
+         const payload = JSON.parse(aggregatedResponse);
+         const argumentsPayload = JSON.parse(payload.function_call.arguments);
+         let newFunctionCallMessages = [...functionCallMessages];
+         const functionResponse = await callbacks.experimental_onFunctionCall(
+           {
+             name: payload.function_call.name,
+             arguments: argumentsPayload
+           },
+           (result) => {
+             newFunctionCallMessages = [
+               ...functionCallMessages,
+               {
+                 role: "assistant",
+                 content: "",
+                 function_call: payload.function_call
+               },
+               {
+                 role: "function",
+                 name: payload.function_call.name,
+                 content: JSON.stringify(result)
+               }
+             ];
+             return newFunctionCallMessages;
+           }
+         );
+         if (!functionResponse) {
+           controller.enqueue(textEncoder.encode(aggregatedResponse));
            return;
-         }
-         if (!isFunctionStreamingIn) {
-           controller.enqueue(chunk);
+         } else if (typeof functionResponse === "string") {
+           controller.enqueue(textEncoder.encode(functionResponse));
            return;
-         } else {
-           aggregatedResponse += message;
          }
-       });
-     },
-     flush(controller) {
-       return __async(this, null, function* () {
-         const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
-         if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
-           isFunctionStreamingIn = false;
-           const payload = JSON.parse(aggregatedResponse);
-           const argumentsPayload = JSON.parse(payload.function_call.arguments);
-           let newFunctionCallMessages = [...functionCallMessages];
-           const functionResponse = yield callbacks.experimental_onFunctionCall(
-             {
-               name: payload.function_call.name,
-               arguments: argumentsPayload
-             },
-             (result) => {
-               newFunctionCallMessages = [
-                 ...functionCallMessages,
-                 {
-                   role: "assistant",
-                   content: "",
-                   function_call: payload.function_call
-                 },
-                 {
-                   role: "function",
-                   name: payload.function_call.name,
-                   content: JSON.stringify(result)
-                 }
-               ];
-               return newFunctionCallMessages;
-             }
-           );
-           if (!functionResponse) {
-             controller.enqueue(textEncoder.encode(aggregatedResponse));
-             return;
-           } else if (typeof functionResponse === "string") {
-             controller.enqueue(textEncoder.encode(functionResponse));
-             return;
-           }
-           const filteredCallbacks = __spreadProps(__spreadValues({}, callbacks), {
-             onStart: void 0,
-             onCompletion: void 0
-           });
-           const openAIStream = OpenAIStream(functionResponse, __spreadProps(__spreadValues({}, filteredCallbacks), {
-             [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
-           }));
-           const reader = openAIStream.getReader();
-           while (true) {
-             const { done, value } = yield reader.read();
-             if (done) {
-               break;
-             }
-             controller.enqueue(value);
+         const filteredCallbacks = {
+           ...callbacks,
+           onStart: void 0,
+           onCompletion: void 0
+         };
+         const openAIStream = OpenAIStream(functionResponse, {
+           ...filteredCallbacks,
+           [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+         });
+         const reader = openAIStream.getReader();
+         while (true) {
+           const { done, value } = await reader.read();
+           if (done) {
+             break;
            }
+           controller.enqueue(value);
          }
-       });
+       }
      }
    });
  }
@@ -296,18 +283,21 @@ function createFunctionCallTransformer(callbacks) {
  // streams/streaming-text-response.ts
  var StreamingTextResponse = class extends Response {
    constructor(res, init) {
-     super(res, __spreadProps(__spreadValues({}, init), {
+     super(res, {
+       ...init,
        status: 200,
-       headers: __spreadValues({
-         "Content-Type": "text/plain; charset=utf-8"
-       }, init == null ? void 0 : init.headers)
-     }));
+       headers: {
+         "Content-Type": "text/plain; charset=utf-8",
+         ...init == null ? void 0 : init.headers
+       }
+     });
    }
  };
  function streamToResponse(res, response, init) {
-   response.writeHead((init == null ? void 0 : init.status) || 200, __spreadValues({
-     "Content-Type": "text/plain; charset=utf-8"
-   }, init == null ? void 0 : init.headers));
+   response.writeHead((init == null ? void 0 : init.status) || 200, {
+     "Content-Type": "text/plain; charset=utf-8",
+     ...init == null ? void 0 : init.headers
+   });
    const reader = res.getReader();
    function read() {
      reader.read().then(({ done, value }) => {
@@ -326,27 +316,25 @@ function streamToResponse(res, response, init) {
  function createParser2(res) {
    const trimStartOfStream = trimStartOfStreamHelper();
    return new ReadableStream({
-     pull(controller) {
-       return __async(this, null, function* () {
-         var _a2, _b;
-         const { value, done } = yield res.next();
-         if (done) {
-           controller.close();
-           return;
-         }
-         const text = trimStartOfStream((_b = (_a2 = value.token) == null ? void 0 : _a2.text) != null ? _b : "");
-         if (!text)
-           return;
-         if (value.generated_text != null && value.generated_text.length > 0) {
-           controller.close();
-           return;
-         }
-         if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
-           controller.close();
-         } else {
-           controller.enqueue(text);
-         }
-       });
+     async pull(controller) {
+       var _a, _b;
+       const { value, done } = await res.next();
+       if (done) {
+         controller.close();
+         return;
+       }
+       const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+       if (!text)
+         return;
+       if (value.generated_text != null && value.generated_text.length > 0) {
+         controller.close();
+         return;
+       }
+       if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+         controller.close();
+       } else {
+         controller.enqueue(text);
+       }
      }
    });
  }
@@ -356,50 +344,44 @@ function HuggingFaceStream(res, callbacks) {

  // streams/cohere-stream.ts
  var utf8Decoder = new TextDecoder("utf-8");
- function processLines(lines, controller) {
-   return __async(this, null, function* () {
-     for (const line of lines) {
-       const { text, is_finished } = JSON.parse(line);
-       if (is_finished === true) {
-         controller.close();
-       } else {
-         controller.enqueue(text);
-       }
+ async function processLines(lines, controller) {
+   for (const line of lines) {
+     const { text, is_finished } = JSON.parse(line);
+     if (is_finished === true) {
+       controller.close();
+     } else {
+       controller.enqueue(text);
      }
-   });
+   }
  }
- function readAndProcessLines(reader, controller) {
-   return __async(this, null, function* () {
-     let segment = "";
-     while (true) {
-       const { value: chunk, done } = yield reader.read();
-       if (done) {
-         break;
-       }
-       segment += utf8Decoder.decode(chunk, { stream: true });
-       const linesArray = segment.split(/\r\n|\n|\r/g);
-       segment = linesArray.pop() || "";
-       yield processLines(linesArray, controller);
+ async function readAndProcessLines(reader, controller) {
+   let segment = "";
+   while (true) {
+     const { value: chunk, done } = await reader.read();
+     if (done) {
+       break;
      }
-     if (segment) {
-       const linesArray = [segment];
-       yield processLines(linesArray, controller);
-     }
-     controller.close();
-   });
+     segment += utf8Decoder.decode(chunk, { stream: true });
+     const linesArray = segment.split(/\r\n|\n|\r/g);
+     segment = linesArray.pop() || "";
+     await processLines(linesArray, controller);
+   }
+   if (segment) {
+     const linesArray = [segment];
+     await processLines(linesArray, controller);
+   }
+   controller.close();
  }
  function createParser3(res) {
    var _a;
    const reader = (_a = res.body) == null ? void 0 : _a.getReader();
    return new ReadableStream({
-     start(controller) {
-       return __async(this, null, function* () {
-         if (!reader) {
-           controller.close();
-           return;
-         }
-         yield readAndProcessLines(reader, controller);
-       });
+     async start(controller) {
+       if (!reader) {
+         controller.close();
+         return;
+       }
+       await readAndProcessLines(reader, controller);
      }
    });
  }
@@ -427,55 +409,55 @@ function LangChainStream(callbacks) {
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();
    const runs = /* @__PURE__ */ new Set();
-   const handleError = (e, runId) => __async(this, null, function* () {
+   const handleError = async (e, runId) => {
      runs.delete(runId);
-     yield writer.ready;
-     yield writer.abort(e);
-   });
-   const handleStart = (runId) => __async(this, null, function* () {
+     await writer.ready;
+     await writer.abort(e);
+   };
+   const handleStart = async (runId) => {
      runs.add(runId);
-   });
-   const handleEnd = (runId) => __async(this, null, function* () {
+   };
+   const handleEnd = async (runId) => {
      runs.delete(runId);
      if (runs.size === 0) {
-       yield writer.ready;
-       yield writer.close();
+       await writer.ready;
+       await writer.close();
      }
-   });
+   };
    return {
      stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)),
      handlers: {
-       handleLLMNewToken: (token) => __async(this, null, function* () {
-         yield writer.ready;
-         yield writer.write(token);
-       }),
-       handleLLMStart: (_llm, _prompts, runId) => __async(this, null, function* () {
+       handleLLMNewToken: async (token) => {
+         await writer.ready;
+         await writer.write(token);
+       },
+       handleLLMStart: async (_llm, _prompts, runId) => {
          handleStart(runId);
-       }),
-       handleLLMEnd: (_output, runId) => __async(this, null, function* () {
-         yield handleEnd(runId);
-       }),
-       handleLLMError: (e, runId) => __async(this, null, function* () {
-         yield handleError(e, runId);
-       }),
-       handleChainStart: (_chain, _inputs, runId) => __async(this, null, function* () {
+       },
+       handleLLMEnd: async (_output, runId) => {
+         await handleEnd(runId);
+       },
+       handleLLMError: async (e, runId) => {
+         await handleError(e, runId);
+       },
+       handleChainStart: async (_chain, _inputs, runId) => {
          handleStart(runId);
-       }),
-       handleChainEnd: (_outputs, runId) => __async(this, null, function* () {
-         yield handleEnd(runId);
-       }),
-       handleChainError: (e, runId) => __async(this, null, function* () {
-         yield handleError(e, runId);
-       }),
-       handleToolStart: (_tool, _input, runId) => __async(this, null, function* () {
+       },
+       handleChainEnd: async (_outputs, runId) => {
+         await handleEnd(runId);
+       },
+       handleChainError: async (e, runId) => {
+         await handleError(e, runId);
+       },
+       handleToolStart: async (_tool, _input, runId) => {
          handleStart(runId);
-       }),
-       handleToolEnd: (_output, runId) => __async(this, null, function* () {
-         yield handleEnd(runId);
-       }),
-       handleToolError: (e, runId) => __async(this, null, function* () {
-         yield handleError(e, runId);
-       })
+       },
+       handleToolEnd: async (_output, runId) => {
+         await handleEnd(runId);
+       },
+       handleToolError: async (e, runId) => {
+         await handleError(e, runId);
+       }
      }
    };
  }
@@ -507,6 +489,7 @@ function createChunkDecoder() {
    createChunkDecoder,
    createEventStreamTransformer,
    nanoid,
+   readableFromAsyncIterable,
    streamToResponse,
    trimStartOfStreamHelper
  });