ai 2.2.34 → 2.2.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +169 -158
- package/dist/index.js +536 -498
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +537 -500
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -30,6 +30,7 @@ __export(streams_exports, {
   CohereStream: () => CohereStream,
   GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
   HuggingFaceStream: () => HuggingFaceStream,
+  InkeepStream: () => InkeepStream,
   LangChainStream: () => LangChainStream,
   OpenAIStream: () => OpenAIStream,
   ReplicateStream: () => ReplicateStream,
@@ -49,8 +50,123 @@ __export(streams_exports, {
 });
 module.exports = __toCommonJS(streams_exports);
 
-// shared/utils.ts
-var import_non_secure = require("nanoid/non-secure");
+// streams/ai-stream.ts
+var import_eventsource_parser = require("eventsource-parser");
+function createEventStreamTransformer(customParser) {
+  const textDecoder = new TextDecoder();
+  let eventSourceParser;
+  return new TransformStream({
+    async start(controller) {
+      eventSourceParser = (0, import_eventsource_parser.createParser)(
+        (event) => {
+          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+          // @see https://replicate.com/docs/streaming
+          event.event === "done") {
+            controller.terminate();
+            return;
+          }
+          if ("data" in event) {
+            const parsedMessage = customParser ? customParser(event.data, {
+              event: event.event
+            }) : event.data;
+            if (parsedMessage)
+              controller.enqueue(parsedMessage);
+          }
+        }
+      );
+    },
+    transform(chunk) {
+      eventSourceParser.feed(textDecoder.decode(chunk));
+    }
+  });
+}
+function createCallbacksTransformer(cb) {
+  const textEncoder = new TextEncoder();
+  let aggregatedResponse = "";
+  const callbacks = cb || {};
+  return new TransformStream({
+    async start() {
+      if (callbacks.onStart)
+        await callbacks.onStart();
+    },
+    async transform(message, controller) {
+      controller.enqueue(textEncoder.encode(message));
+      aggregatedResponse += message;
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+    },
+    async flush() {
+      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+      if (callbacks.onFinal && !isOpenAICallbacks) {
+        await callbacks.onFinal(aggregatedResponse);
+      }
+    }
+  });
+}
+function isOfTypeOpenAIStreamCallbacks(callbacks) {
+  return "experimental_onFunctionCall" in callbacks;
+}
+function trimStartOfStreamHelper() {
+  let isStreamStart = true;
+  return (text) => {
+    if (isStreamStart) {
+      text = text.trimStart();
+      if (text)
+        isStreamStart = false;
+    }
+    return text;
+  };
+}
+function AIStream(response, customParser, callbacks) {
+  if (!response.ok) {
+    if (response.body) {
+      const reader = response.body.getReader();
+      return new ReadableStream({
+        async start(controller) {
+          const { done, value } = await reader.read();
+          if (!done) {
+            const errorText = new TextDecoder().decode(value);
+            controller.error(new Error(`Response error: ${errorText}`));
+          }
+        }
+      });
+    } else {
+      return new ReadableStream({
+        start(controller) {
+          controller.error(new Error("Response error: No response body"));
+        }
+      });
+    }
+  }
+  const responseBodyStream = response.body || createEmptyReadableStream();
+  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
+}
+function createEmptyReadableStream() {
+  return new ReadableStream({
+    start(controller) {
+      controller.close();
+    }
+  });
+}
+function readableFromAsyncIterable(iterable) {
+  let it = iterable[Symbol.asyncIterator]();
+  return new ReadableStream({
+    async pull(controller) {
+      const { done, value } = await it.next();
+      if (done)
+        controller.close();
+      else
+        controller.enqueue(value);
+    },
+    async cancel(reason) {
+      var _a;
+      await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
+    }
+  });
+}
 
 // shared/stream-parts.ts
 var textStreamPart = {
@@ -232,146 +348,6 @@ function formatStreamPart(type, value) {
 `;
 }
 
-// shared/utils.ts
-var nanoid = (0, import_non_secure.customAlphabet)(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-function createChunkDecoder(complex) {
-  const decoder = new TextDecoder();
-  if (!complex) {
-    return function(chunk) {
-      if (!chunk)
-        return "";
-      return decoder.decode(chunk, { stream: true });
-    };
-  }
-  return function(chunk) {
-    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
-    return decoded.map(parseStreamPart).filter(Boolean);
-  };
-}
-var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
-var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
-// streams/ai-stream.ts
-var import_eventsource_parser = require("eventsource-parser");
-function createEventStreamTransformer(customParser) {
-  const textDecoder = new TextDecoder();
-  let eventSourceParser;
-  return new TransformStream({
-    async start(controller) {
-      eventSourceParser = (0, import_eventsource_parser.createParser)(
-        (event) => {
-          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
-          // @see https://replicate.com/docs/streaming
-          event.event === "done") {
-            controller.terminate();
-            return;
-          }
-          if ("data" in event) {
-            const parsedMessage = customParser ? customParser(event.data, {
-              event: event.event
-            }) : event.data;
-            if (parsedMessage)
-              controller.enqueue(parsedMessage);
-          }
-        }
-      );
-    },
-    transform(chunk) {
-      eventSourceParser.feed(textDecoder.decode(chunk));
-    }
-  });
-}
-function createCallbacksTransformer(cb) {
-  const textEncoder = new TextEncoder();
-  let aggregatedResponse = "";
-  const callbacks = cb || {};
-  return new TransformStream({
-    async start() {
-      if (callbacks.onStart)
-        await callbacks.onStart();
-    },
-    async transform(message, controller) {
-      controller.enqueue(textEncoder.encode(message));
-      aggregatedResponse += message;
-      if (callbacks.onToken)
-        await callbacks.onToken(message);
-    },
-    async flush() {
-      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
-      if (callbacks.onCompletion) {
-        await callbacks.onCompletion(aggregatedResponse);
-      }
-      if (callbacks.onFinal && !isOpenAICallbacks) {
-        await callbacks.onFinal(aggregatedResponse);
-      }
-    }
-  });
-}
-function isOfTypeOpenAIStreamCallbacks(callbacks) {
-  return "experimental_onFunctionCall" in callbacks;
-}
-function trimStartOfStreamHelper() {
-  let isStreamStart = true;
-  return (text) => {
-    if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
-        isStreamStart = false;
-    }
-    return text;
-  };
-}
-function AIStream(response, customParser, callbacks) {
-  if (!response.ok) {
-    if (response.body) {
-      const reader = response.body.getReader();
-      return new ReadableStream({
-        async start(controller) {
-          const { done, value } = await reader.read();
-          if (!done) {
-            const errorText = new TextDecoder().decode(value);
-            controller.error(new Error(`Response error: ${errorText}`));
-          }
-        }
-      });
-    } else {
-      return new ReadableStream({
-        start(controller) {
-          controller.error(new Error("Response error: No response body"));
-        }
-      });
-    }
-  }
-  const responseBodyStream = response.body || createEmptyReadableStream();
-  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
-}
-function createEmptyReadableStream() {
-  return new ReadableStream({
-    start(controller) {
-      controller.close();
-    }
-  });
-}
-function readableFromAsyncIterable(iterable) {
-  let it = iterable[Symbol.asyncIterator]();
-  return new ReadableStream({
-    async pull(controller) {
-      const { done, value } = await it.next();
-      if (done)
-        controller.close();
-      else
-        controller.enqueue(value);
-    },
-    async cancel(reason) {
-      var _a;
-      await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
-    }
-  });
-}
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
   constructor() {
@@ -458,345 +434,96 @@ var experimental_StreamData = class {
     }
     this.messageAnnotations.push(value);
   }
-};
-function createStreamDataTransformer(experimental_streamData) {
-  if (!experimental_streamData) {
-    return new TransformStream({
-      transform: async (chunk, controller) => {
-        controller.enqueue(chunk);
-      }
-    });
-  }
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-  return new TransformStream({
-    transform: async (chunk, controller) => {
-      const message = decoder.decode(chunk);
-      controller.enqueue(encoder.encode(formatStreamPart("text", message)));
-    }
-  });
-}
-
-// streams/anthropic-stream.ts
-function parseAnthropicStream() {
-  let previous = "";
-  return (data) => {
-    const json = JSON.parse(data);
-    if ("error" in json) {
-      throw new Error(`${json.error.type}: ${json.error.message}`);
-    }
-    if (!("completion" in json)) {
-      return;
-    }
-    const text = json.completion;
-    if (!previous || text.length > previous.length && text.startsWith(previous)) {
-      const delta = text.slice(previous.length);
-      previous = text;
-      return delta;
-    }
-    return text;
-  };
-}
-async function* streamable(stream) {
-  for await (const chunk of stream) {
-    if ("completion" in chunk) {
-      const text = chunk.completion;
-      if (text)
-        yield text;
-    } else if ("delta" in chunk) {
-      const { delta } = chunk;
-      if ("text" in delta) {
-        const text = delta.text;
-        if (text)
-          yield text;
-      }
-    }
-  }
-}
-function AnthropicStream(res, cb) {
-  if (Symbol.asyncIterator in res) {
-    return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
-  } else {
-    return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
-      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
-    );
-  }
-}
-
-// streams/assistant-response.ts
-function experimental_AssistantResponse({ threadId, messageId }, process2) {
-  const stream = new ReadableStream({
-    async start(controller) {
-      var _a;
-      const textEncoder = new TextEncoder();
-      const sendMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("assistant_message", message))
-        );
-      };
-      const sendDataMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("data_message", message))
-        );
-      };
-      const sendError = (errorMessage) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("error", errorMessage))
-        );
-      };
-      controller.enqueue(
-        textEncoder.encode(
-          formatStreamPart("assistant_control_data", {
-            threadId,
-            messageId
-          })
-        )
-      );
-      try {
-        await process2({
-          threadId,
-          messageId,
-          sendMessage,
-          sendDataMessage
-        });
-      } catch (error) {
-        sendError((_a = error.message) != null ? _a : `${error}`);
-      } finally {
-        controller.close();
-      }
-    },
-    pull(controller) {
-    },
-    cancel() {
-    }
-  });
-  return new Response(stream, {
-    status: 200,
-    headers: {
-      "Content-Type": "text/plain; charset=utf-8"
-    }
-  });
-}
-
-// streams/aws-bedrock-stream.ts
-async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
-  var _a, _b;
-  const decoder = new TextDecoder();
-  for await (const chunk of (_a = response.body) != null ? _a : []) {
-    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
-    if (bytes != null) {
-      const chunkText = decoder.decode(bytes);
-      const chunkJSON = JSON.parse(chunkText);
-      const delta = extractTextDeltaFromChunk(chunkJSON);
-      if (delta != null) {
-        yield delta;
-      }
-    }
-  }
-}
-function AWSBedrockAnthropicStream(response, callbacks) {
-  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
-}
-function AWSBedrockCohereStream(response, callbacks) {
-  return AWSBedrockStream(
-    response,
-    callbacks,
-    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
-    // so we take the full generation:
-    (chunk) => {
-      var _a, _b;
-      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
-    }
-  );
-}
-function AWSBedrockLlama2Stream(response, callbacks) {
-  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
-}
-function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
-  return readableFromAsyncIterable(
-    asDeltaIterable(response, extractTextDeltaFromChunk)
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
-}
-
-// streams/cohere-stream.ts
-var utf8Decoder = new TextDecoder("utf-8");
-async function processLines(lines, controller) {
-  for (const line of lines) {
-    const { text, is_finished } = JSON.parse(line);
-    if (!is_finished) {
-      controller.enqueue(text);
-    }
-  }
-}
-async function readAndProcessLines(reader, controller) {
-  let segment = "";
-  while (true) {
-    const { value: chunk, done } = await reader.read();
-    if (done) {
-      break;
-    }
-    segment += utf8Decoder.decode(chunk, { stream: true });
-    const linesArray = segment.split(/\r\n|\n|\r/g);
-    segment = linesArray.pop() || "";
-    await processLines(linesArray, controller);
-  }
-  if (segment) {
-    const linesArray = [segment];
-    await processLines(linesArray, controller);
-  }
-  controller.close();
-}
-function createParser2(res) {
-  var _a;
-  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
-  return new ReadableStream({
-    async start(controller) {
-      if (!reader) {
-        controller.close();
-        return;
-      }
-      await readAndProcessLines(reader, controller);
-    }
-  });
-}
-async function* streamable2(stream) {
-  for await (const chunk of stream) {
-    if (chunk.eventType === "text-generation") {
-      const text = chunk.text;
-      if (text)
-        yield text;
-    }
-  }
-}
-function CohereStream(reader, callbacks) {
-  if (Symbol.asyncIterator in reader) {
-    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    );
-  } else {
-    return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    );
-  }
+};
+function createStreamDataTransformer(experimental_streamData) {
+  if (!experimental_streamData) {
+    return new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue(chunk);
+      }
+    });
+  }
+  const encoder = new TextEncoder();
+  const decoder = new TextDecoder();
+  return new TransformStream({
+    transform: async (chunk, controller) => {
+      const message = decoder.decode(chunk);
+      controller.enqueue(encoder.encode(formatStreamPart("text", message)));
+    }
+  });
 }
 
-// streams/google-generative-ai-stream.ts
-async function* streamable3(response) {
-  var _a, _b, _c;
-  for await (const chunk of response.stream) {
-    const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
-    if (parts === void 0) {
-      continue;
-    }
-    const firstPart = parts[0];
-    if (typeof firstPart.text === "string") {
-      yield firstPart.text;
+// streams/aws-bedrock-stream.ts
+async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+  var _a, _b;
+  const decoder = new TextDecoder();
+  for await (const chunk of (_a = response.body) != null ? _a : []) {
+    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+    if (bytes != null) {
+      const chunkText = decoder.decode(bytes);
+      const chunkJSON = JSON.parse(chunkText);
+      const delta = extractTextDeltaFromChunk(chunkJSON);
+      if (delta != null) {
+        yield delta;
+      }
     }
   }
 }
-function GoogleGenerativeAIStream(response, cb) {
-  return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+function AWSBedrockAnthropicStream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
 }
-
-// streams/huggingface-stream.ts
-function createParser2(res) {
-  const trimStartOfStream = trimStartOfStreamHelper();
-  return new ReadableStream({
-    async pull(controller) {
+function AWSBedrockCohereStream(response, callbacks) {
+  return AWSBedrockStream(
+    response,
+    callbacks,
+    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+    // so we take the full generation:
+    (chunk) => {
       var _a, _b;
-      const { value, done } = await res.next();
-      if (done) {
-        controller.close();
-        return;
-      }
-      const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
-      if (!text)
-        return;
-      if (value.generated_text != null && value.generated_text.length > 0) {
-        return;
-      }
-      if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
-        return;
-      }
-      controller.enqueue(text);
+      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
     }
-  });
+  );
 }
-function HuggingFaceStream(res, callbacks) {
-  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+function AWSBedrockLlama2Stream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+}
+function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+  return readableFromAsyncIterable(
+    asDeltaIterable(response, extractTextDeltaFromChunk)
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
     createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
   );
 }
 
-// streams/langchain-stream.ts
-function LangChainStream(callbacks) {
-  const stream = new TransformStream();
-  const writer = stream.writable.getWriter();
-  const runs = /* @__PURE__ */ new Set();
-  const handleError = async (e, runId) => {
-    runs.delete(runId);
-    await writer.ready;
-    await writer.abort(e);
-  };
-  const handleStart = async (runId) => {
-    runs.add(runId);
-  };
-  const handleEnd = async (runId) => {
-    runs.delete(runId);
-    if (runs.size === 0) {
-      await writer.ready;
-      await writer.close();
-    }
-  };
-  return {
-    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    ),
-    writer,
-    handlers: {
-      handleLLMNewToken: async (token) => {
-        await writer.ready;
-        await writer.write(token);
-      },
-      handleLLMStart: async (_llm, _prompts, runId) => {
-        handleStart(runId);
-      },
-      handleLLMEnd: async (_output, runId) => {
-        await handleEnd(runId);
-      },
-      handleLLMError: async (e, runId) => {
-        await handleError(e, runId);
-      },
-      handleChainStart: async (_chain, _inputs, runId) => {
-        handleStart(runId);
-      },
-      handleChainEnd: async (_outputs, runId) => {
-        await handleEnd(runId);
-      },
-      handleChainError: async (e, runId) => {
-        await handleError(e, runId);
-      },
-      handleToolStart: async (_tool, _input, runId) => {
-        handleStart(runId);
-      },
-      handleToolEnd: async (_output, runId) => {
-        await handleEnd(runId);
-      },
-      handleToolError: async (e, runId) => {
-        await handleError(e, runId);
-      }
-    }
+// shared/utils.ts
+var import_non_secure = require("nanoid/non-secure");
+var nanoid = (0, import_non_secure.customAlphabet)(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder(complex) {
+  const decoder = new TextDecoder();
+  if (!complex) {
+    return function(chunk) {
+      if (!chunk)
+        return "";
+      return decoder.decode(chunk, { stream: true });
+    };
+  }
+  return function(chunk) {
+    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+    return decoded.map(parseStreamPart).filter(Boolean);
   };
 }
+var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+var COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
   return (data) => extract(JSON.parse(data));
 }
-async function* streamable4(stream) {
+async function* streamable(stream) {
   const extract = chunkToText();
   for await (let chunk of stream) {
     if ("promptFilterResults" in chunk) {
@@ -885,7 +612,7 @@ function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
   if (Symbol.asyncIterator in res) {
-    stream = readableFromAsyncIterable(streamable4(res)).pipeThrough(
+    stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
       createCallbacksTransformer(
         (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
           ...cb,
@@ -1090,6 +817,280 @@ function createFunctionCallTransformer(callbacks) {
   });
 }
 
+// streams/streaming-text-response.ts
+var StreamingTextResponse = class extends Response {
+  constructor(res, init, data) {
+    let processedStream = res;
+    if (data) {
+      processedStream = res.pipeThrough(data.stream);
+    }
+    super(processedStream, {
+      ...init,
+      status: 200,
+      headers: {
+        "Content-Type": "text/plain; charset=utf-8",
+        [COMPLEX_HEADER]: data ? "true" : "false",
+        ...init == null ? void 0 : init.headers
+      }
+    });
+  }
+};
+function streamToResponse(res, response, init) {
+  response.writeHead((init == null ? void 0 : init.status) || 200, {
+    "Content-Type": "text/plain; charset=utf-8",
+    ...init == null ? void 0 : init.headers
+  });
+  const reader = res.getReader();
+  function read() {
+    reader.read().then(({ done, value }) => {
+      if (done) {
+        response.end();
+        return;
+      }
+      response.write(value);
+      read();
+    });
+  }
+  read();
+}
+
+// streams/huggingface-stream.ts
+function createParser2(res) {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream({
+    async pull(controller) {
+      var _a, _b;
+      const { value, done } = await res.next();
+      if (done) {
+        controller.close();
+        return;
+      }
+      const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+      if (!text)
+        return;
+      if (value.generated_text != null && value.generated_text.length > 0) {
+        return;
+      }
+      if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+        return;
+      }
+      controller.enqueue(text);
+    }
+  });
+}
+function HuggingFaceStream(res, callbacks) {
+  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
+}
+
+// streams/cohere-stream.ts
+var utf8Decoder = new TextDecoder("utf-8");
+async function processLines(lines, controller) {
+  for (const line of lines) {
+    const { text, is_finished } = JSON.parse(line);
+    if (!is_finished) {
+      controller.enqueue(text);
+    }
+  }
+}
+async function readAndProcessLines(reader, controller) {
+  let segment = "";
+  while (true) {
+    const { value: chunk, done } = await reader.read();
+    if (done) {
+      break;
+    }
+    segment += utf8Decoder.decode(chunk, { stream: true });
+    const linesArray = segment.split(/\r\n|\n|\r/g);
+    segment = linesArray.pop() || "";
+    await processLines(linesArray, controller);
+  }
+  if (segment) {
+    const linesArray = [segment];
+    await processLines(linesArray, controller);
+  }
+  controller.close();
+}
+function createParser3(res) {
+  var _a;
+  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
+  return new ReadableStream({
+    async start(controller) {
+      if (!reader) {
+        controller.close();
+        return;
+      }
+      await readAndProcessLines(reader, controller);
+    }
+  });
+}
+async function* streamable2(stream) {
+  for await (const chunk of stream) {
+    if (chunk.eventType === "text-generation") {
+      const text = chunk.text;
+      if (text)
+        yield text;
+    }
+  }
+}
+function CohereStream(reader, callbacks) {
+  if (Symbol.asyncIterator in reader) {
+    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  } else {
+    return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  }
+}
+
+// streams/anthropic-stream.ts
+function parseAnthropicStream() {
+  let previous = "";
+  return (data) => {
+    const json = JSON.parse(data);
+    if ("error" in json) {
+      throw new Error(`${json.error.type}: ${json.error.message}`);
+    }
+    if (!("completion" in json)) {
+      return;
+    }
+    const text = json.completion;
+    if (!previous || text.length > previous.length && text.startsWith(previous)) {
+      const delta = text.slice(previous.length);
+      previous = text;
+      return delta;
+    }
+    return text;
+  };
+}
+async function* streamable3(stream) {
+  for await (const chunk of stream) {
+    if ("completion" in chunk) {
+      const text = chunk.completion;
+      if (text)
+        yield text;
+    } else if ("delta" in chunk) {
+      const { delta } = chunk;
+      if ("text" in delta) {
+        const text = delta.text;
+        if (text)
+          yield text;
+      }
+    }
+  }
+}
+function AnthropicStream(res, cb) {
+  if (Symbol.asyncIterator in res) {
+    return readableFromAsyncIterable(streamable3(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+  } else {
+    return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
+      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    );
+  }
+}
+
+// streams/inkeep-stream.ts
+function InkeepStream(res, callbacks) {
+  if (!res.body) {
+    throw new Error("Response body is null");
+  }
+  let chat_session_id = "";
+  let records_cited;
+  const inkeepEventParser = (data, options) => {
+    var _a, _b;
+    const { event } = options;
+    if (event === "records_cited") {
+      records_cited = JSON.parse(data);
+      (_a = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a.call(callbacks, records_cited);
+    }
+    if (event === "message_chunk") {
+      const inkeepMessageChunk = JSON.parse(data);
+      chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
+      return inkeepMessageChunk.content_chunk;
+    }
+    return;
+  };
+  let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
+  passThroughCallbacks = {
+    ...passThroughCallbacks,
+    onFinal: (completion) => {
+      var _a;
+      const inkeepOnFinalMetadata = {
+        chat_session_id,
+        records_cited
+      };
+      (_a = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a.call(callbacks, completion, inkeepOnFinalMetadata);
+    }
+  };
+  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
+    createStreamDataTransformer(passThroughCallbacks == null ? void 0 : passThroughCallbacks.experimental_streamData)
+  );
+}
+
+// streams/langchain-stream.ts
+function LangChainStream(callbacks) {
+  const stream = new TransformStream();
+  const writer = stream.writable.getWriter();
+  const runs = /* @__PURE__ */ new Set();
+  const handleError = async (e, runId) => {
+    runs.delete(runId);
+    await writer.ready;
+    await writer.abort(e);
+  };
+  const handleStart = async (runId) => {
+    runs.add(runId);
+  };
+  const handleEnd = async (runId) => {
+    runs.delete(runId);
+    if (runs.size === 0) {
+      await writer.ready;
+      await writer.close();
+    }
+  };
+  return {
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    ),
+    writer,
+    handlers: {
+      handleLLMNewToken: async (token) => {
+        await writer.ready;
+        await writer.write(token);
+      },
+      handleLLMStart: async (_llm, _prompts, runId) => {
+        handleStart(runId);
+      },
+      handleLLMEnd: async (_output, runId) => {
+        await handleEnd(runId);
+      },
+      handleLLMError: async (e, runId) => {
+        await handleError(e, runId);
+      },
+      handleChainStart: async (_chain, _inputs, runId) => {
+        handleStart(runId);
+      },
+      handleChainEnd: async (_outputs, runId) => {
+        await handleEnd(runId);
+      },
+      handleChainError: async (e, runId) => {
+        await handleError(e, runId);
+      },
+      handleToolStart: async (_tool, _input, runId) => {
+        handleStart(runId);
+      },
+      handleToolEnd: async (_output, runId) => {
+        await handleEnd(runId);
+      },
+      handleToolError: async (e, runId) => {
+        await handleError(e, runId);
+      }
+    }
+  };
+}
+
 // streams/replicate-stream.ts
 async function ReplicateStream(res, cb, options) {
   var _a;
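The one genuinely new module in the hunk above is `streams/inkeep-stream.ts`; the rest of the added block is existing code reordered within the bundle. Below is a minimal usage sketch for the new `InkeepStream` export, written against the implementation shown. Only `InkeepStream`, `StreamingTextResponse`, and the callback shapes come from this diff; the endpoint URL, request body, and auth header are illustrative assumptions, not part of this package.

```ts
// Hypothetical route handler wiring InkeepStream into a chat endpoint.
import { InkeepStream, StreamingTextResponse } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Assumption: an SSE endpoint that emits "message_chunk" and
  // "records_cited" events, which is what inkeepEventParser handles.
  const res = await fetch("https://api.inkeep.com/v0/chat_sessions/chat_results", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.INKEEP_API_KEY}`,
    },
    body: JSON.stringify({ messages }),
  });

  const stream = InkeepStream(res, {
    // Called once per "records_cited" event with the parsed payload.
    onRecordsCited(recordsCited) {
      console.log("citations:", recordsCited);
    },
    // Receives the aggregated completion plus the metadata
    // (chat_session_id, records_cited) collected while streaming.
    onFinal(completion, metadata) {
      console.log("chat session:", metadata?.chat_session_id);
    },
  });

  return new StreamingTextResponse(stream);
}
```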
@@ -1112,6 +1113,79 @@ async function ReplicateStream(res, cb, options) {
   );
 }
 
+// streams/assistant-response.ts
+function experimental_AssistantResponse({ threadId, messageId }, process2) {
+  const stream = new ReadableStream({
+    async start(controller) {
+      var _a;
+      const textEncoder = new TextEncoder();
+      const sendMessage = (message) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("assistant_message", message))
+        );
+      };
+      const sendDataMessage = (message) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("data_message", message))
+        );
+      };
+      const sendError = (errorMessage) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("error", errorMessage))
+        );
+      };
+      controller.enqueue(
+        textEncoder.encode(
+          formatStreamPart("assistant_control_data", {
+            threadId,
+            messageId
+          })
+        )
+      );
+      try {
+        await process2({
+          threadId,
+          messageId,
+          sendMessage,
+          sendDataMessage
+        });
+      } catch (error) {
+        sendError((_a = error.message) != null ? _a : `${error}`);
+      } finally {
+        controller.close();
+      }
+    },
+    pull(controller) {
+    },
+    cancel() {
+    }
+  });
+  return new Response(stream, {
+    status: 200,
+    headers: {
+      "Content-Type": "text/plain; charset=utf-8"
+    }
+  });
+}
+
+// streams/google-generative-ai-stream.ts
+async function* streamable4(response) {
+  var _a, _b, _c;
+  for await (const chunk of response.stream) {
+    const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+    if (parts === void 0) {
+      continue;
+    }
+    const firstPart = parts[0];
+    if (typeof firstPart.text === "string") {
+      yield firstPart.text;
+    }
+  }
+}
+function GoogleGenerativeAIStream(response, cb) {
+  return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+}
+
 // shared/read-data-stream.ts
 var NEWLINE = "\n".charCodeAt(0);
 function concatChunks(chunks, totalLength) {
@@ -1340,43 +1414,6 @@ var experimental_StreamingReactResponse = class {
     return next;
   }
 };
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = res.pipeThrough(data.stream);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: {
-        "Content-Type": "text/plain; charset=utf-8",
-        [COMPLEX_HEADER]: data ? "true" : "false",
-        ...init == null ? void 0 : init.headers
-      }
-    });
-  }
-};
-function streamToResponse(res, response, init) {
-  response.writeHead((init == null ? void 0 : init.status) || 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  const reader = res.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
-}
 
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AIStream,
@@ -1389,6 +1426,7 @@ function streamToResponse(res, response, init) {
   CohereStream,
   GoogleGenerativeAIStream,
   HuggingFaceStream,
+  InkeepStream,
   LangChainStream,
   OpenAIStream,
   ReplicateStream,