ai 2.2.34 → 2.2.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,5 +1,122 @@
1
- // shared/utils.ts
2
- import { customAlphabet } from "nanoid/non-secure";
1
+ // streams/ai-stream.ts
2
+ import {
3
+ createParser
4
+ } from "eventsource-parser";
5
+ function createEventStreamTransformer(customParser) {
6
+ const textDecoder = new TextDecoder();
7
+ let eventSourceParser;
8
+ return new TransformStream({
9
+ async start(controller) {
10
+ eventSourceParser = createParser(
11
+ (event) => {
12
+ if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
13
+ // @see https://replicate.com/docs/streaming
14
+ event.event === "done") {
15
+ controller.terminate();
16
+ return;
17
+ }
18
+ if ("data" in event) {
19
+ const parsedMessage = customParser ? customParser(event.data, {
20
+ event: event.event
21
+ }) : event.data;
22
+ if (parsedMessage)
23
+ controller.enqueue(parsedMessage);
24
+ }
25
+ }
26
+ );
27
+ },
28
+ transform(chunk) {
29
+ eventSourceParser.feed(textDecoder.decode(chunk));
30
+ }
31
+ });
32
+ }
33
+ function createCallbacksTransformer(cb) {
34
+ const textEncoder = new TextEncoder();
35
+ let aggregatedResponse = "";
36
+ const callbacks = cb || {};
37
+ return new TransformStream({
38
+ async start() {
39
+ if (callbacks.onStart)
40
+ await callbacks.onStart();
41
+ },
42
+ async transform(message, controller) {
43
+ controller.enqueue(textEncoder.encode(message));
44
+ aggregatedResponse += message;
45
+ if (callbacks.onToken)
46
+ await callbacks.onToken(message);
47
+ },
48
+ async flush() {
49
+ const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
50
+ if (callbacks.onCompletion) {
51
+ await callbacks.onCompletion(aggregatedResponse);
52
+ }
53
+ if (callbacks.onFinal && !isOpenAICallbacks) {
54
+ await callbacks.onFinal(aggregatedResponse);
55
+ }
56
+ }
57
+ });
58
+ }
59
+ function isOfTypeOpenAIStreamCallbacks(callbacks) {
60
+ return "experimental_onFunctionCall" in callbacks;
61
+ }
62
+ function trimStartOfStreamHelper() {
63
+ let isStreamStart = true;
64
+ return (text) => {
65
+ if (isStreamStart) {
66
+ text = text.trimStart();
67
+ if (text)
68
+ isStreamStart = false;
69
+ }
70
+ return text;
71
+ };
72
+ }
73
+ function AIStream(response, customParser, callbacks) {
74
+ if (!response.ok) {
75
+ if (response.body) {
76
+ const reader = response.body.getReader();
77
+ return new ReadableStream({
78
+ async start(controller) {
79
+ const { done, value } = await reader.read();
80
+ if (!done) {
81
+ const errorText = new TextDecoder().decode(value);
82
+ controller.error(new Error(`Response error: ${errorText}`));
83
+ }
84
+ }
85
+ });
86
+ } else {
87
+ return new ReadableStream({
88
+ start(controller) {
89
+ controller.error(new Error("Response error: No response body"));
90
+ }
91
+ });
92
+ }
93
+ }
94
+ const responseBodyStream = response.body || createEmptyReadableStream();
95
+ return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
96
+ }
97
+ function createEmptyReadableStream() {
98
+ return new ReadableStream({
99
+ start(controller) {
100
+ controller.close();
101
+ }
102
+ });
103
+ }
104
+ function readableFromAsyncIterable(iterable) {
105
+ let it = iterable[Symbol.asyncIterator]();
106
+ return new ReadableStream({
107
+ async pull(controller) {
108
+ const { done, value } = await it.next();
109
+ if (done)
110
+ controller.close();
111
+ else
112
+ controller.enqueue(value);
113
+ },
114
+ async cancel(reason) {
115
+ var _a;
116
+ await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
117
+ }
118
+ });
119
+ }
3
120
 
4
121
  // shared/stream-parts.ts
5
122
  var textStreamPart = {
@@ -181,148 +298,6 @@ function formatStreamPart(type, value) {
181
298
  `;
182
299
  }
183
300
 
184
- // shared/utils.ts
185
- var nanoid = customAlphabet(
186
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
187
- 7
188
- );
189
- function createChunkDecoder(complex) {
190
- const decoder = new TextDecoder();
191
- if (!complex) {
192
- return function(chunk) {
193
- if (!chunk)
194
- return "";
195
- return decoder.decode(chunk, { stream: true });
196
- };
197
- }
198
- return function(chunk) {
199
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
200
- return decoded.map(parseStreamPart).filter(Boolean);
201
- };
202
- }
203
- var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
204
- var COMPLEX_HEADER = "X-Experimental-Stream-Data";
205
-
206
- // streams/ai-stream.ts
207
- import {
208
- createParser
209
- } from "eventsource-parser";
210
- function createEventStreamTransformer(customParser) {
211
- const textDecoder = new TextDecoder();
212
- let eventSourceParser;
213
- return new TransformStream({
214
- async start(controller) {
215
- eventSourceParser = createParser(
216
- (event) => {
217
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
218
- // @see https://replicate.com/docs/streaming
219
- event.event === "done") {
220
- controller.terminate();
221
- return;
222
- }
223
- if ("data" in event) {
224
- const parsedMessage = customParser ? customParser(event.data, {
225
- event: event.event
226
- }) : event.data;
227
- if (parsedMessage)
228
- controller.enqueue(parsedMessage);
229
- }
230
- }
231
- );
232
- },
233
- transform(chunk) {
234
- eventSourceParser.feed(textDecoder.decode(chunk));
235
- }
236
- });
237
- }
238
- function createCallbacksTransformer(cb) {
239
- const textEncoder = new TextEncoder();
240
- let aggregatedResponse = "";
241
- const callbacks = cb || {};
242
- return new TransformStream({
243
- async start() {
244
- if (callbacks.onStart)
245
- await callbacks.onStart();
246
- },
247
- async transform(message, controller) {
248
- controller.enqueue(textEncoder.encode(message));
249
- aggregatedResponse += message;
250
- if (callbacks.onToken)
251
- await callbacks.onToken(message);
252
- },
253
- async flush() {
254
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
255
- if (callbacks.onCompletion) {
256
- await callbacks.onCompletion(aggregatedResponse);
257
- }
258
- if (callbacks.onFinal && !isOpenAICallbacks) {
259
- await callbacks.onFinal(aggregatedResponse);
260
- }
261
- }
262
- });
263
- }
264
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
265
- return "experimental_onFunctionCall" in callbacks;
266
- }
267
- function trimStartOfStreamHelper() {
268
- let isStreamStart = true;
269
- return (text) => {
270
- if (isStreamStart) {
271
- text = text.trimStart();
272
- if (text)
273
- isStreamStart = false;
274
- }
275
- return text;
276
- };
277
- }
278
- function AIStream(response, customParser, callbacks) {
279
- if (!response.ok) {
280
- if (response.body) {
281
- const reader = response.body.getReader();
282
- return new ReadableStream({
283
- async start(controller) {
284
- const { done, value } = await reader.read();
285
- if (!done) {
286
- const errorText = new TextDecoder().decode(value);
287
- controller.error(new Error(`Response error: ${errorText}`));
288
- }
289
- }
290
- });
291
- } else {
292
- return new ReadableStream({
293
- start(controller) {
294
- controller.error(new Error("Response error: No response body"));
295
- }
296
- });
297
- }
298
- }
299
- const responseBodyStream = response.body || createEmptyReadableStream();
300
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
301
- }
302
- function createEmptyReadableStream() {
303
- return new ReadableStream({
304
- start(controller) {
305
- controller.close();
306
- }
307
- });
308
- }
309
- function readableFromAsyncIterable(iterable) {
310
- let it = iterable[Symbol.asyncIterator]();
311
- return new ReadableStream({
312
- async pull(controller) {
313
- const { done, value } = await it.next();
314
- if (done)
315
- controller.close();
316
- else
317
- controller.enqueue(value);
318
- },
319
- async cancel(reason) {
320
- var _a;
321
- await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
322
- }
323
- });
324
- }
325
-
326
301
  // streams/stream-data.ts
327
302
  var experimental_StreamData = class {
328
303
  constructor() {
@@ -409,345 +384,96 @@ var experimental_StreamData = class {
409
384
  }
410
385
  this.messageAnnotations.push(value);
411
386
  }
412
- };
413
- function createStreamDataTransformer(experimental_streamData) {
414
- if (!experimental_streamData) {
415
- return new TransformStream({
416
- transform: async (chunk, controller) => {
417
- controller.enqueue(chunk);
418
- }
419
- });
420
- }
421
- const encoder = new TextEncoder();
422
- const decoder = new TextDecoder();
423
- return new TransformStream({
424
- transform: async (chunk, controller) => {
425
- const message = decoder.decode(chunk);
426
- controller.enqueue(encoder.encode(formatStreamPart("text", message)));
427
- }
428
- });
429
- }
430
-
431
- // streams/anthropic-stream.ts
432
- function parseAnthropicStream() {
433
- let previous = "";
434
- return (data) => {
435
- const json = JSON.parse(data);
436
- if ("error" in json) {
437
- throw new Error(`${json.error.type}: ${json.error.message}`);
438
- }
439
- if (!("completion" in json)) {
440
- return;
441
- }
442
- const text = json.completion;
443
- if (!previous || text.length > previous.length && text.startsWith(previous)) {
444
- const delta = text.slice(previous.length);
445
- previous = text;
446
- return delta;
447
- }
448
- return text;
449
- };
450
- }
451
- async function* streamable(stream) {
452
- for await (const chunk of stream) {
453
- if ("completion" in chunk) {
454
- const text = chunk.completion;
455
- if (text)
456
- yield text;
457
- } else if ("delta" in chunk) {
458
- const { delta } = chunk;
459
- if ("text" in delta) {
460
- const text = delta.text;
461
- if (text)
462
- yield text;
463
- }
464
- }
465
- }
466
- }
467
- function AnthropicStream(res, cb) {
468
- if (Symbol.asyncIterator in res) {
469
- return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
470
- } else {
471
- return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
472
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
473
- );
474
- }
475
- }
476
-
477
- // streams/assistant-response.ts
478
- function experimental_AssistantResponse({ threadId, messageId }, process2) {
479
- const stream = new ReadableStream({
480
- async start(controller) {
481
- var _a;
482
- const textEncoder = new TextEncoder();
483
- const sendMessage = (message) => {
484
- controller.enqueue(
485
- textEncoder.encode(formatStreamPart("assistant_message", message))
486
- );
487
- };
488
- const sendDataMessage = (message) => {
489
- controller.enqueue(
490
- textEncoder.encode(formatStreamPart("data_message", message))
491
- );
492
- };
493
- const sendError = (errorMessage) => {
494
- controller.enqueue(
495
- textEncoder.encode(formatStreamPart("error", errorMessage))
496
- );
497
- };
498
- controller.enqueue(
499
- textEncoder.encode(
500
- formatStreamPart("assistant_control_data", {
501
- threadId,
502
- messageId
503
- })
504
- )
505
- );
506
- try {
507
- await process2({
508
- threadId,
509
- messageId,
510
- sendMessage,
511
- sendDataMessage
512
- });
513
- } catch (error) {
514
- sendError((_a = error.message) != null ? _a : `${error}`);
515
- } finally {
516
- controller.close();
517
- }
518
- },
519
- pull(controller) {
520
- },
521
- cancel() {
522
- }
523
- });
524
- return new Response(stream, {
525
- status: 200,
526
- headers: {
527
- "Content-Type": "text/plain; charset=utf-8"
528
- }
529
- });
530
- }
531
-
532
- // streams/aws-bedrock-stream.ts
533
- async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
534
- var _a, _b;
535
- const decoder = new TextDecoder();
536
- for await (const chunk of (_a = response.body) != null ? _a : []) {
537
- const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
538
- if (bytes != null) {
539
- const chunkText = decoder.decode(bytes);
540
- const chunkJSON = JSON.parse(chunkText);
541
- const delta = extractTextDeltaFromChunk(chunkJSON);
542
- if (delta != null) {
543
- yield delta;
544
- }
545
- }
546
- }
547
- }
548
- function AWSBedrockAnthropicStream(response, callbacks) {
549
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
550
- }
551
- function AWSBedrockCohereStream(response, callbacks) {
552
- return AWSBedrockStream(
553
- response,
554
- callbacks,
555
- // As of 2023-11-17, Bedrock does not support streaming for Cohere,
556
- // so we take the full generation:
557
- (chunk) => {
558
- var _a, _b;
559
- return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
560
- }
561
- );
562
- }
563
- function AWSBedrockLlama2Stream(response, callbacks) {
564
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
565
- }
566
- function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
567
- return readableFromAsyncIterable(
568
- asDeltaIterable(response, extractTextDeltaFromChunk)
569
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
570
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
571
- );
572
- }
573
-
574
- // streams/cohere-stream.ts
575
- var utf8Decoder = new TextDecoder("utf-8");
576
- async function processLines(lines, controller) {
577
- for (const line of lines) {
578
- const { text, is_finished } = JSON.parse(line);
579
- if (!is_finished) {
580
- controller.enqueue(text);
581
- }
582
- }
583
- }
584
- async function readAndProcessLines(reader, controller) {
585
- let segment = "";
586
- while (true) {
587
- const { value: chunk, done } = await reader.read();
588
- if (done) {
589
- break;
590
- }
591
- segment += utf8Decoder.decode(chunk, { stream: true });
592
- const linesArray = segment.split(/\r\n|\n|\r/g);
593
- segment = linesArray.pop() || "";
594
- await processLines(linesArray, controller);
595
- }
596
- if (segment) {
597
- const linesArray = [segment];
598
- await processLines(linesArray, controller);
599
- }
600
- controller.close();
601
- }
602
- function createParser2(res) {
603
- var _a;
604
- const reader = (_a = res.body) == null ? void 0 : _a.getReader();
605
- return new ReadableStream({
606
- async start(controller) {
607
- if (!reader) {
608
- controller.close();
609
- return;
610
- }
611
- await readAndProcessLines(reader, controller);
612
- }
613
- });
614
- }
615
- async function* streamable2(stream) {
616
- for await (const chunk of stream) {
617
- if (chunk.eventType === "text-generation") {
618
- const text = chunk.text;
619
- if (text)
620
- yield text;
621
- }
622
- }
623
- }
624
- function CohereStream(reader, callbacks) {
625
- if (Symbol.asyncIterator in reader) {
626
- return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
627
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
628
- );
629
- } else {
630
- return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
631
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
632
- );
633
- }
387
+ };
388
+ function createStreamDataTransformer(experimental_streamData) {
389
+ if (!experimental_streamData) {
390
+ return new TransformStream({
391
+ transform: async (chunk, controller) => {
392
+ controller.enqueue(chunk);
393
+ }
394
+ });
395
+ }
396
+ const encoder = new TextEncoder();
397
+ const decoder = new TextDecoder();
398
+ return new TransformStream({
399
+ transform: async (chunk, controller) => {
400
+ const message = decoder.decode(chunk);
401
+ controller.enqueue(encoder.encode(formatStreamPart("text", message)));
402
+ }
403
+ });
634
404
  }
635
405
 
636
- // streams/google-generative-ai-stream.ts
637
- async function* streamable3(response) {
638
- var _a, _b, _c;
639
- for await (const chunk of response.stream) {
640
- const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
641
- if (parts === void 0) {
642
- continue;
643
- }
644
- const firstPart = parts[0];
645
- if (typeof firstPart.text === "string") {
646
- yield firstPart.text;
406
+ // streams/aws-bedrock-stream.ts
407
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
408
+ var _a, _b;
409
+ const decoder = new TextDecoder();
410
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
411
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
412
+ if (bytes != null) {
413
+ const chunkText = decoder.decode(bytes);
414
+ const chunkJSON = JSON.parse(chunkText);
415
+ const delta = extractTextDeltaFromChunk(chunkJSON);
416
+ if (delta != null) {
417
+ yield delta;
418
+ }
647
419
  }
648
420
  }
649
421
  }
650
- function GoogleGenerativeAIStream(response, cb) {
651
- return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
422
+ function AWSBedrockAnthropicStream(response, callbacks) {
423
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
652
424
  }
653
-
654
- // streams/huggingface-stream.ts
655
- function createParser3(res) {
656
- const trimStartOfStream = trimStartOfStreamHelper();
657
- return new ReadableStream({
658
- async pull(controller) {
425
+ function AWSBedrockCohereStream(response, callbacks) {
426
+ return AWSBedrockStream(
427
+ response,
428
+ callbacks,
429
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
430
+ // so we take the full generation:
431
+ (chunk) => {
659
432
  var _a, _b;
660
- const { value, done } = await res.next();
661
- if (done) {
662
- controller.close();
663
- return;
664
- }
665
- const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
666
- if (!text)
667
- return;
668
- if (value.generated_text != null && value.generated_text.length > 0) {
669
- return;
670
- }
671
- if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
672
- return;
673
- }
674
- controller.enqueue(text);
433
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
675
434
  }
676
- });
435
+ );
677
436
  }
678
- function HuggingFaceStream(res, callbacks) {
679
- return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
437
+ function AWSBedrockLlama2Stream(response, callbacks) {
438
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
439
+ }
440
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
441
+ return readableFromAsyncIterable(
442
+ asDeltaIterable(response, extractTextDeltaFromChunk)
443
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
680
444
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
681
445
  );
682
446
  }
683
447
 
684
- // streams/langchain-stream.ts
685
- function LangChainStream(callbacks) {
686
- const stream = new TransformStream();
687
- const writer = stream.writable.getWriter();
688
- const runs = /* @__PURE__ */ new Set();
689
- const handleError = async (e, runId) => {
690
- runs.delete(runId);
691
- await writer.ready;
692
- await writer.abort(e);
693
- };
694
- const handleStart = async (runId) => {
695
- runs.add(runId);
696
- };
697
- const handleEnd = async (runId) => {
698
- runs.delete(runId);
699
- if (runs.size === 0) {
700
- await writer.ready;
701
- await writer.close();
702
- }
703
- };
704
- return {
705
- stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
706
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
707
- ),
708
- writer,
709
- handlers: {
710
- handleLLMNewToken: async (token) => {
711
- await writer.ready;
712
- await writer.write(token);
713
- },
714
- handleLLMStart: async (_llm, _prompts, runId) => {
715
- handleStart(runId);
716
- },
717
- handleLLMEnd: async (_output, runId) => {
718
- await handleEnd(runId);
719
- },
720
- handleLLMError: async (e, runId) => {
721
- await handleError(e, runId);
722
- },
723
- handleChainStart: async (_chain, _inputs, runId) => {
724
- handleStart(runId);
725
- },
726
- handleChainEnd: async (_outputs, runId) => {
727
- await handleEnd(runId);
728
- },
729
- handleChainError: async (e, runId) => {
730
- await handleError(e, runId);
731
- },
732
- handleToolStart: async (_tool, _input, runId) => {
733
- handleStart(runId);
734
- },
735
- handleToolEnd: async (_output, runId) => {
736
- await handleEnd(runId);
737
- },
738
- handleToolError: async (e, runId) => {
739
- await handleError(e, runId);
740
- }
741
- }
448
+ // shared/utils.ts
449
+ import { customAlphabet } from "nanoid/non-secure";
450
+ var nanoid = customAlphabet(
451
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
452
+ 7
453
+ );
454
+ function createChunkDecoder(complex) {
455
+ const decoder = new TextDecoder();
456
+ if (!complex) {
457
+ return function(chunk) {
458
+ if (!chunk)
459
+ return "";
460
+ return decoder.decode(chunk, { stream: true });
461
+ };
462
+ }
463
+ return function(chunk) {
464
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
465
+ return decoded.map(parseStreamPart).filter(Boolean);
742
466
  };
743
467
  }
468
+ var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
469
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
744
470
 
745
471
  // streams/openai-stream.ts
746
472
  function parseOpenAIStream() {
747
473
  const extract = chunkToText();
748
474
  return (data) => extract(JSON.parse(data));
749
475
  }
750
- async function* streamable4(stream) {
476
+ async function* streamable(stream) {
751
477
  const extract = chunkToText();
752
478
  for await (let chunk of stream) {
753
479
  if ("promptFilterResults" in chunk) {
@@ -836,7 +562,7 @@ function OpenAIStream(res, callbacks) {
836
562
  const cb = callbacks;
837
563
  let stream;
838
564
  if (Symbol.asyncIterator in res) {
839
- stream = readableFromAsyncIterable(streamable4(res)).pipeThrough(
565
+ stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
840
566
  createCallbacksTransformer(
841
567
  (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
842
568
  ...cb,
@@ -1041,6 +767,280 @@ function createFunctionCallTransformer(callbacks) {
1041
767
  });
1042
768
  }
1043
769
 
770
+ // streams/streaming-text-response.ts
771
+ var StreamingTextResponse = class extends Response {
772
+ constructor(res, init, data) {
773
+ let processedStream = res;
774
+ if (data) {
775
+ processedStream = res.pipeThrough(data.stream);
776
+ }
777
+ super(processedStream, {
778
+ ...init,
779
+ status: 200,
780
+ headers: {
781
+ "Content-Type": "text/plain; charset=utf-8",
782
+ [COMPLEX_HEADER]: data ? "true" : "false",
783
+ ...init == null ? void 0 : init.headers
784
+ }
785
+ });
786
+ }
787
+ };
788
+ function streamToResponse(res, response, init) {
789
+ response.writeHead((init == null ? void 0 : init.status) || 200, {
790
+ "Content-Type": "text/plain; charset=utf-8",
791
+ ...init == null ? void 0 : init.headers
792
+ });
793
+ const reader = res.getReader();
794
+ function read() {
795
+ reader.read().then(({ done, value }) => {
796
+ if (done) {
797
+ response.end();
798
+ return;
799
+ }
800
+ response.write(value);
801
+ read();
802
+ });
803
+ }
804
+ read();
805
+ }
806
+
807
+ // streams/huggingface-stream.ts
808
+ function createParser2(res) {
809
+ const trimStartOfStream = trimStartOfStreamHelper();
810
+ return new ReadableStream({
811
+ async pull(controller) {
812
+ var _a, _b;
813
+ const { value, done } = await res.next();
814
+ if (done) {
815
+ controller.close();
816
+ return;
817
+ }
818
+ const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
819
+ if (!text)
820
+ return;
821
+ if (value.generated_text != null && value.generated_text.length > 0) {
822
+ return;
823
+ }
824
+ if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
825
+ return;
826
+ }
827
+ controller.enqueue(text);
828
+ }
829
+ });
830
+ }
831
+ function HuggingFaceStream(res, callbacks) {
832
+ return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
833
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
834
+ );
835
+ }
836
+
837
+ // streams/cohere-stream.ts
838
+ var utf8Decoder = new TextDecoder("utf-8");
839
+ async function processLines(lines, controller) {
840
+ for (const line of lines) {
841
+ const { text, is_finished } = JSON.parse(line);
842
+ if (!is_finished) {
843
+ controller.enqueue(text);
844
+ }
845
+ }
846
+ }
847
+ async function readAndProcessLines(reader, controller) {
848
+ let segment = "";
849
+ while (true) {
850
+ const { value: chunk, done } = await reader.read();
851
+ if (done) {
852
+ break;
853
+ }
854
+ segment += utf8Decoder.decode(chunk, { stream: true });
855
+ const linesArray = segment.split(/\r\n|\n|\r/g);
856
+ segment = linesArray.pop() || "";
857
+ await processLines(linesArray, controller);
858
+ }
859
+ if (segment) {
860
+ const linesArray = [segment];
861
+ await processLines(linesArray, controller);
862
+ }
863
+ controller.close();
864
+ }
865
+ function createParser3(res) {
866
+ var _a;
867
+ const reader = (_a = res.body) == null ? void 0 : _a.getReader();
868
+ return new ReadableStream({
869
+ async start(controller) {
870
+ if (!reader) {
871
+ controller.close();
872
+ return;
873
+ }
874
+ await readAndProcessLines(reader, controller);
875
+ }
876
+ });
877
+ }
878
+ async function* streamable2(stream) {
879
+ for await (const chunk of stream) {
880
+ if (chunk.eventType === "text-generation") {
881
+ const text = chunk.text;
882
+ if (text)
883
+ yield text;
884
+ }
885
+ }
886
+ }
887
+ function CohereStream(reader, callbacks) {
888
+ if (Symbol.asyncIterator in reader) {
889
+ return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
890
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
891
+ );
892
+ } else {
893
+ return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
894
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
895
+ );
896
+ }
897
+ }
898
+
899
+ // streams/anthropic-stream.ts
900
+ function parseAnthropicStream() {
901
+ let previous = "";
902
+ return (data) => {
903
+ const json = JSON.parse(data);
904
+ if ("error" in json) {
905
+ throw new Error(`${json.error.type}: ${json.error.message}`);
906
+ }
907
+ if (!("completion" in json)) {
908
+ return;
909
+ }
910
+ const text = json.completion;
911
+ if (!previous || text.length > previous.length && text.startsWith(previous)) {
912
+ const delta = text.slice(previous.length);
913
+ previous = text;
914
+ return delta;
915
+ }
916
+ return text;
917
+ };
918
+ }
919
+ async function* streamable3(stream) {
920
+ for await (const chunk of stream) {
921
+ if ("completion" in chunk) {
922
+ const text = chunk.completion;
923
+ if (text)
924
+ yield text;
925
+ } else if ("delta" in chunk) {
926
+ const { delta } = chunk;
927
+ if ("text" in delta) {
928
+ const text = delta.text;
929
+ if (text)
930
+ yield text;
931
+ }
932
+ }
933
+ }
934
+ }
935
+ function AnthropicStream(res, cb) {
936
+ if (Symbol.asyncIterator in res) {
937
+ return readableFromAsyncIterable(streamable3(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
938
+ } else {
939
+ return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
940
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
941
+ );
942
+ }
943
+ }
944
+
945
+ // streams/inkeep-stream.ts
946
+ function InkeepStream(res, callbacks) {
947
+ if (!res.body) {
948
+ throw new Error("Response body is null");
949
+ }
950
+ let chat_session_id = "";
951
+ let records_cited;
952
+ const inkeepEventParser = (data, options) => {
953
+ var _a, _b;
954
+ const { event } = options;
955
+ if (event === "records_cited") {
956
+ records_cited = JSON.parse(data);
957
+ (_a = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a.call(callbacks, records_cited);
958
+ }
959
+ if (event === "message_chunk") {
960
+ const inkeepMessageChunk = JSON.parse(data);
961
+ chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
962
+ return inkeepMessageChunk.content_chunk;
963
+ }
964
+ return;
965
+ };
966
+ let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
967
+ passThroughCallbacks = {
968
+ ...passThroughCallbacks,
969
+ onFinal: (completion) => {
970
+ var _a;
971
+ const inkeepOnFinalMetadata = {
972
+ chat_session_id,
973
+ records_cited
974
+ };
975
+ (_a = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a.call(callbacks, completion, inkeepOnFinalMetadata);
976
+ }
977
+ };
978
+ return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
979
+ createStreamDataTransformer(passThroughCallbacks == null ? void 0 : passThroughCallbacks.experimental_streamData)
980
+ );
981
+ }
982
+
983
+ // streams/langchain-stream.ts
984
+ function LangChainStream(callbacks) {
985
+ const stream = new TransformStream();
986
+ const writer = stream.writable.getWriter();
987
+ const runs = /* @__PURE__ */ new Set();
988
+ const handleError = async (e, runId) => {
989
+ runs.delete(runId);
990
+ await writer.ready;
991
+ await writer.abort(e);
992
+ };
993
+ const handleStart = async (runId) => {
994
+ runs.add(runId);
995
+ };
996
+ const handleEnd = async (runId) => {
997
+ runs.delete(runId);
998
+ if (runs.size === 0) {
999
+ await writer.ready;
1000
+ await writer.close();
1001
+ }
1002
+ };
1003
+ return {
1004
+ stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
1005
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
1006
+ ),
1007
+ writer,
1008
+ handlers: {
1009
+ handleLLMNewToken: async (token) => {
1010
+ await writer.ready;
1011
+ await writer.write(token);
1012
+ },
1013
+ handleLLMStart: async (_llm, _prompts, runId) => {
1014
+ handleStart(runId);
1015
+ },
1016
+ handleLLMEnd: async (_output, runId) => {
1017
+ await handleEnd(runId);
1018
+ },
1019
+ handleLLMError: async (e, runId) => {
1020
+ await handleError(e, runId);
1021
+ },
1022
+ handleChainStart: async (_chain, _inputs, runId) => {
1023
+ handleStart(runId);
1024
+ },
1025
+ handleChainEnd: async (_outputs, runId) => {
1026
+ await handleEnd(runId);
1027
+ },
1028
+ handleChainError: async (e, runId) => {
1029
+ await handleError(e, runId);
1030
+ },
1031
+ handleToolStart: async (_tool, _input, runId) => {
1032
+ handleStart(runId);
1033
+ },
1034
+ handleToolEnd: async (_output, runId) => {
1035
+ await handleEnd(runId);
1036
+ },
1037
+ handleToolError: async (e, runId) => {
1038
+ await handleError(e, runId);
1039
+ }
1040
+ }
1041
+ };
1042
+ }
1043
+
1044
1044
  // streams/replicate-stream.ts
1045
1045
  async function ReplicateStream(res, cb, options) {
1046
1046
  var _a;
@@ -1063,6 +1063,79 @@ async function ReplicateStream(res, cb, options) {
1063
1063
  );
1064
1064
  }
1065
1065
 
1066
// streams/assistant-response.ts
/**
 * Builds a streaming HTTP Response for an OpenAI-Assistants-style
 * interaction. Emits an `assistant_control_data` part (thread + message
 * identity) first, then runs `process2`, which may push assistant
 * messages and data messages through the provided senders. Any error
 * thrown by `process2` is serialized as an `error` part; the stream is
 * always closed afterwards.
 */
function experimental_AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
    async start(controller) {
      const encoder = new TextEncoder();
      // Serialize one stream part and push it onto the response body.
      const enqueuePart = (type, value) => {
        controller.enqueue(encoder.encode(formatStreamPart(type, value)));
      };
      const sendMessage = (message) => enqueuePart("assistant_message", message);
      const sendDataMessage = (message) => enqueuePart("data_message", message);
      const sendError = (errorMessage) => enqueuePart("error", errorMessage);
      // Announce thread/message identity before any content parts.
      enqueuePart("assistant_control_data", {
        threadId,
        messageId
      });
      try {
        await process2({
          threadId,
          messageId,
          sendMessage,
          sendDataMessage
        });
      } catch (error) {
        const message = error.message;
        sendError(message != null ? message : `${error}`);
      } finally {
        controller.close();
      }
    },
    pull(controller) {
    },
    cancel() {
    }
  });
  return new Response(stream, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8"
    }
  });
}
1120
+
1121
// streams/google-generative-ai-stream.ts
/**
 * Async generator over a Google Generative AI streaming response that
 * yields the text of the first content part of each chunk's first
 * candidate. Chunks without candidate parts are skipped; parts whose
 * first entry has no string `text` (e.g. function calls) yield nothing.
 */
async function* streamable4(response) {
  for await (const chunk of response.stream) {
    const parts = chunk.candidates?.[0]?.content?.parts;
    if (parts === void 0) {
      continue;
    }
    const [firstPart] = parts;
    if (typeof firstPart.text === "string") {
      yield firstPart.text;
    }
  }
}
1135
/**
 * Adapts a Google Generative AI streaming response into an AIStream:
 * text chunks -> callbacks transformer -> stream-data transformer.
 *
 * @param response Google Generative AI response with a `.stream` iterable.
 * @param cb optional AIStream callbacks; `experimental_streamData`
 *           toggles the stream-data wire protocol.
 */
function GoogleGenerativeAIStream(response, cb) {
  const textStream = readableFromAsyncIterable(streamable4(response));
  const withCallbacks = textStream.pipeThrough(createCallbacksTransformer(cb));
  return withCallbacks.pipeThrough(
    createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
  );
}
1138
+
1066
1139
  // shared/read-data-stream.ts
1067
1140
  var NEWLINE = "\n".charCodeAt(0);
1068
1141
  function concatChunks(chunks, totalLength) {
@@ -1291,43 +1364,6 @@ var experimental_StreamingReactResponse = class {
1291
1364
  return next;
1292
1365
  }
1293
1366
  };
1294
-
1295
// streams/streaming-text-response.ts
/**
 * A 200 `text/plain` Response whose body is a readable stream. When a
 * `data` stream is supplied, the body is first piped through
 * `data.stream` and the COMPLEX_HEADER flag is set to "true" so clients
 * know to parse stream-data parts; otherwise it is "false".
 */
var StreamingTextResponse = class extends Response {
  constructor(res, init, data) {
    const body = data ? res.pipeThrough(data.stream) : res;
    const headers = {
      "Content-Type": "text/plain; charset=utf-8",
      [COMPLEX_HEADER]: data ? "true" : "false",
      // Caller-supplied headers intentionally win over the defaults.
      ...init == null ? void 0 : init.headers
    };
    super(body, {
      ...init,
      // Status is forced to 200 regardless of init.status.
      status: 200,
      headers
    });
  }
};
1313
- function streamToResponse(res, response, init) {
1314
- response.writeHead((init == null ? void 0 : init.status) || 200, {
1315
- "Content-Type": "text/plain; charset=utf-8",
1316
- ...init == null ? void 0 : init.headers
1317
- });
1318
- const reader = res.getReader();
1319
- function read() {
1320
- reader.read().then(({ done, value }) => {
1321
- if (done) {
1322
- response.end();
1323
- return;
1324
- }
1325
- response.write(value);
1326
- read();
1327
- });
1328
- }
1329
- read();
1330
- }
1331
1367
  export {
1332
1368
  AIStream,
1333
1369
  AWSBedrockAnthropicStream,
@@ -1339,6 +1375,7 @@ export {
1339
1375
  CohereStream,
1340
1376
  GoogleGenerativeAIStream,
1341
1377
  HuggingFaceStream,
1378
+ InkeepStream,
1342
1379
  LangChainStream,
1343
1380
  OpenAIStream,
1344
1381
  ReplicateStream,