ai 2.2.35 → 2.2.36

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -50,123 +50,8 @@ __export(streams_exports, {
  });
  module.exports = __toCommonJS(streams_exports);

- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createEventStreamTransformer(customParser) {
- const textDecoder = new TextDecoder();
- let eventSourceParser;
- return new TransformStream({
- async start(controller) {
- eventSourceParser = (0, import_eventsource_parser.createParser)(
- (event) => {
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
- // @see https://replicate.com/docs/streaming
- event.event === "done") {
- controller.terminate();
- return;
- }
- if ("data" in event) {
- const parsedMessage = customParser ? customParser(event.data, {
- event: event.event
- }) : event.data;
- if (parsedMessage)
- controller.enqueue(parsedMessage);
- }
- }
- );
- },
- transform(chunk) {
- eventSourceParser.feed(textDecoder.decode(chunk));
- }
- });
- }
- function createCallbacksTransformer(cb) {
- const textEncoder = new TextEncoder();
- let aggregatedResponse = "";
- const callbacks = cb || {};
- return new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
- async transform(message, controller) {
- controller.enqueue(textEncoder.encode(message));
- aggregatedResponse += message;
- if (callbacks.onToken)
- await callbacks.onToken(message);
- },
- async flush() {
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
- if (callbacks.onCompletion) {
- await callbacks.onCompletion(aggregatedResponse);
- }
- if (callbacks.onFinal && !isOpenAICallbacks) {
- await callbacks.onFinal(aggregatedResponse);
- }
- }
- });
- }
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
- return "experimental_onFunctionCall" in callbacks;
- }
- function trimStartOfStreamHelper() {
- let isStreamStart = true;
- return (text) => {
- if (isStreamStart) {
- text = text.trimStart();
- if (text)
- isStreamStart = false;
- }
- return text;
- };
- }
- function AIStream(response, customParser, callbacks) {
- if (!response.ok) {
- if (response.body) {
- const reader = response.body.getReader();
- return new ReadableStream({
- async start(controller) {
- const { done, value } = await reader.read();
- if (!done) {
- const errorText = new TextDecoder().decode(value);
- controller.error(new Error(`Response error: ${errorText}`));
- }
- }
- });
- } else {
- return new ReadableStream({
- start(controller) {
- controller.error(new Error("Response error: No response body"));
- }
- });
- }
- }
- const responseBodyStream = response.body || createEmptyReadableStream();
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
- function createEmptyReadableStream() {
- return new ReadableStream({
- start(controller) {
- controller.close();
- }
- });
- }
- function readableFromAsyncIterable(iterable) {
- let it = iterable[Symbol.asyncIterator]();
- return new ReadableStream({
- async pull(controller) {
- const { done, value } = await it.next();
- if (done)
- controller.close();
- else
- controller.enqueue(value);
- },
- async cancel(reason) {
- var _a;
- await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
- }
- });
- }
+ // shared/utils.ts
+ var import_non_secure = require("nanoid/non-secure");

  // shared/stream-parts.ts
  var textStreamPart = {
@@ -348,538 +233,390 @@ function formatStreamPart(type, value) {
  `;
  }

- // streams/stream-data.ts
- var experimental_StreamData = class {
- constructor() {
- this.encoder = new TextEncoder();
- this.controller = null;
- // closing the stream is synchronous, but we want to return a promise
- // in case we're doing async work
- this.isClosedPromise = null;
- this.isClosedPromiseResolver = void 0;
- this.isClosed = false;
- // array to store appended data
- this.data = [];
- this.messageAnnotations = [];
- this.isClosedPromise = new Promise((resolve) => {
- this.isClosedPromiseResolver = resolve;
- });
- const self = this;
- this.stream = new TransformStream({
- start: async (controller) => {
- self.controller = controller;
- },
- transform: async (chunk, controller) => {
- if (self.data.length > 0) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- self.data = [];
- controller.enqueue(encodedData);
- }
- if (self.messageAnnotations.length) {
- const encodedMessageAnnotations = self.encoder.encode(
- formatStreamPart("message_annotations", self.messageAnnotations)
- );
- self.messageAnnotations = [];
- controller.enqueue(encodedMessageAnnotations);
- }
- controller.enqueue(chunk);
- },
- async flush(controller) {
- const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
- console.warn(
- "The data stream is hanging. Did you forget to close it with `data.close()`?"
- );
- }, 3e3) : null;
- await self.isClosedPromise;
- if (warningTimeout !== null) {
- clearTimeout(warningTimeout);
- }
- if (self.data.length) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- controller.enqueue(encodedData);
- }
- if (self.messageAnnotations.length) {
- const encodedData = self.encoder.encode(
- formatStreamPart("message_annotations", self.messageAnnotations)
- );
- controller.enqueue(encodedData);
- }
- }
- });
- }
- async close() {
- var _a;
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
- this.isClosed = true;
- }
- append(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- this.data.push(value);
- }
- appendMessageAnnotation(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- this.messageAnnotations.push(value);
- }
- };
- function createStreamDataTransformer(experimental_streamData) {
- if (!experimental_streamData) {
- return new TransformStream({
- transform: async (chunk, controller) => {
- controller.enqueue(chunk);
- }
- });
- }
- const encoder = new TextEncoder();
- const decoder = new TextDecoder();
- return new TransformStream({
- transform: async (chunk, controller) => {
- const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode(formatStreamPart("text", message)));
- }
- });
- }
-
- // streams/aws-bedrock-stream.ts
- async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a, _b;
+ // shared/utils.ts
+ var nanoid = (0, import_non_secure.customAlphabet)(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder(complex) {
  const decoder = new TextDecoder();
- for await (const chunk of (_a = response.body) != null ? _a : []) {
- const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
- if (bytes != null) {
- const chunkText = decoder.decode(bytes);
- const chunkJSON = JSON.parse(chunkText);
- const delta = extractTextDeltaFromChunk(chunkJSON);
- if (delta != null) {
- yield delta;
- }
- }
+ if (!complex) {
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
  }
- }
- function AWSBedrockAnthropicStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
- }
- function AWSBedrockCohereStream(response, callbacks) {
- return AWSBedrockStream(
- response,
- callbacks,
- // As of 2023-11-17, Bedrock does not support streaming for Cohere,
- // so we take the full generation:
- (chunk) => {
- var _a, _b;
- return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
- }
- );
- }
- function AWSBedrockLlama2Stream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
- }
- function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
- return readableFromAsyncIterable(
- asDeltaIterable(response, extractTextDeltaFromChunk)
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
- );
- }
-
- // shared/utils.ts
- var import_non_secure = require("nanoid/non-secure");
- var nanoid = (0, import_non_secure.customAlphabet)(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
- function createChunkDecoder(complex) {
- const decoder = new TextDecoder();
- if (!complex) {
- return function(chunk) {
- if (!chunk)
- return "";
- return decoder.decode(chunk, { stream: true });
- };
- }
- return function(chunk) {
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
- return decoded.map(parseStreamPart).filter(Boolean);
- };
+ return function(chunk) {
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+ return decoded.map(parseStreamPart).filter(Boolean);
+ };
  }
  var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";

- // streams/openai-stream.ts
- function parseOpenAIStream() {
- const extract = chunkToText();
- return (data) => extract(JSON.parse(data));
- }
- async function* streamable(stream) {
- const extract = chunkToText();
- for await (let chunk of stream) {
- if ("promptFilterResults" in chunk) {
- chunk = {
- id: chunk.id,
- created: chunk.created.getDate(),
- object: chunk.object,
- // not exposed by Azure API
- model: chunk.model,
- // not exposed by Azure API
- choices: chunk.choices.map((choice) => {
- var _a, _b, _c, _d, _e, _f, _g;
- return {
- delta: {
- content: (_a = choice.delta) == null ? void 0 : _a.content,
- function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
- role: (_c = choice.delta) == null ? void 0 : _c.role,
- tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
- index,
- id: toolCall.id,
- function: toolCall.function,
- type: toolCall.type
- })) : void 0
- },
- finish_reason: choice.finishReason,
- index: choice.index
- };
- })
- };
- }
- const text = extract(chunk);
- if (text)
- yield text;
- }
- }
- function chunkToText() {
- const trimStartOfStream = trimStartOfStreamHelper();
- let isFunctionStreamingIn;
- return (json) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
- if (isChatCompletionChunk(json)) {
- const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
- if ((_b = delta.function_call) == null ? void 0 : _b.name) {
- isFunctionStreamingIn = true;
- return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
- } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
- isFunctionStreamingIn = true;
- const toolCall = delta.tool_calls[0];
- if (toolCall.index === 0) {
- return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
- } else {
- return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
+ // streams/ai-stream.ts
+ var import_eventsource_parser = require("eventsource-parser");
+ function createEventStreamTransformer(customParser) {
+ const textDecoder = new TextDecoder();
+ let eventSourceParser;
+ return new TransformStream({
+ async start(controller) {
+ eventSourceParser = (0, import_eventsource_parser.createParser)(
+ (event) => {
+ if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+ // @see https://replicate.com/docs/streaming
+ event.event === "done") {
+ controller.terminate();
+ return;
+ }
+ if ("data" in event) {
+ const parsedMessage = customParser ? customParser(event.data, {
+ event: event.event
+ }) : event.data;
+ if (parsedMessage)
+ controller.enqueue(parsedMessage);
+ }
  }
- } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
- return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
- } else if ((_k = (_j = delta.tool_calls) == null ? void 0 : _j[0].function) == null ? void 0 : _k.arguments) {
- return cleanupArguments((_n = (_m = (_l = delta.tool_calls) == null ? void 0 : _l[0]) == null ? void 0 : _m.function) == null ? void 0 : _n.arguments);
- } else if (isFunctionStreamingIn && (((_o = json.choices[0]) == null ? void 0 : _o.finish_reason) === "function_call" || ((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "stop")) {
- isFunctionStreamingIn = false;
- return '"}}';
- } else if (isFunctionStreamingIn && ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "tool_calls") {
- isFunctionStreamingIn = false;
- return '"}}]}';
- }
+ );
+ },
+ transform(chunk) {
+ eventSourceParser.feed(textDecoder.decode(chunk));
  }
- const text = trimStartOfStream(
- isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
- );
- return text;
- };
- function cleanupArguments(argumentChunk) {
- let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
- return `${escapedPartialJson}`;
- }
- }
- var __internal__OpenAIFnMessagesSymbol = Symbol(
- "internal_openai_fn_messages"
- );
- function isChatCompletionChunk(data) {
- return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
- }
- function isCompletion(data) {
- return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
- }
- function OpenAIStream(res, callbacks) {
- const cb = callbacks;
- let stream;
- if (Symbol.asyncIterator in res) {
- stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
- createCallbacksTransformer(
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- )
- );
- } else {
- stream = AIStream(
- res,
- parseOpenAIStream(),
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- );
- }
- if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
- const functionCallTransformer = createFunctionCallTransformer(cb);
- return stream.pipeThrough(functionCallTransformer);
- } else {
- return stream.pipeThrough(
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
- );
- }
+ });
  }
- function createFunctionCallTransformer(callbacks) {
+ function createCallbacksTransformer(cb) {
  const textEncoder = new TextEncoder();
- let isFirstChunk = true;
  let aggregatedResponse = "";
- let aggregatedFinalCompletionResponse = "";
- let isFunctionStreamingIn = false;
- let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
- const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
- const decode = createChunkDecoder();
+ const callbacks = cb || {};
  return new TransformStream({
- async transform(chunk, controller) {
- const message = decode(chunk);
- aggregatedFinalCompletionResponse += message;
- const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
- if (shouldHandleAsFunction) {
- isFunctionStreamingIn = true;
- aggregatedResponse += message;
- isFirstChunk = false;
- return;
+ async start() {
+ if (callbacks.onStart)
+ await callbacks.onStart();
+ },
+ async transform(message, controller) {
+ controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
+ if (callbacks.onToken)
+ await callbacks.onToken(message);
+ },
+ async flush() {
+ const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+ if (callbacks.onCompletion) {
+ await callbacks.onCompletion(aggregatedResponse);
  }
- if (!isFunctionStreamingIn) {
- controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
- );
- return;
- } else {
- aggregatedResponse += message;
+ if (callbacks.onFinal && !isOpenAICallbacks) {
+ await callbacks.onFinal(aggregatedResponse);
  }
- },
- async flush(controller) {
- try {
- if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
- isFunctionStreamingIn = false;
- const payload = JSON.parse(aggregatedResponse);
- let newFunctionCallMessages = [
- ...functionCallMessages
- ];
- let functionResponse = void 0;
- if (callbacks.experimental_onFunctionCall) {
- if (payload.function_call === void 0) {
- console.warn(
- "experimental_onFunctionCall should not be defined when using tools"
- );
- }
- const argumentsPayload = JSON.parse(
- payload.function_call.arguments
- );
- functionResponse = await callbacks.experimental_onFunctionCall(
- {
- name: payload.function_call.name,
- arguments: argumentsPayload
- },
- (result) => {
- newFunctionCallMessages = [
- ...functionCallMessages,
- {
- role: "assistant",
- content: "",
- function_call: payload.function_call
- },
- {
- role: "function",
- name: payload.function_call.name,
- content: JSON.stringify(result)
- }
- ];
- return newFunctionCallMessages;
- }
- );
- }
- if (callbacks.experimental_onToolCall) {
- const toolCalls = {
- tools: []
- };
- for (const tool of payload.tool_calls) {
- toolCalls.tools.push({
- id: tool.id,
- type: "function",
- func: {
- name: tool.function.name,
- arguments: tool.function.arguments
- }
- });
- }
- let responseIndex = 0;
- try {
- functionResponse = await callbacks.experimental_onToolCall(
- toolCalls,
- (result) => {
- if (result) {
- const { tool_call_id, function_name, tool_call_result } = result;
- newFunctionCallMessages = [
- ...newFunctionCallMessages,
- // Only append the assistant message if it's the first response
- ...responseIndex === 0 ? [
- {
- role: "assistant",
- content: "",
- tool_calls: payload.tool_calls.map(
- (tc) => ({
- id: tc.id,
- type: "function",
- function: {
- name: tc.function.name,
- // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
- arguments: JSON.stringify(
- tc.function.arguments
- )
- }
- })
- )
- }
- ] : [],
- // Append the function call result message
- {
- role: "tool",
- tool_call_id,
- name: function_name,
- content: JSON.stringify(tool_call_result)
- }
- ];
- responseIndex++;
- }
- return newFunctionCallMessages;
- }
- );
- } catch (e) {
- console.error("Error calling experimental_onToolCall:", e);
- }
- }
- if (!functionResponse) {
- controller.enqueue(
- textEncoder.encode(
- isComplexMode ? formatStreamPart(
- payload.function_call ? "function_call" : "tool_calls",
- // parse to prevent double-encoding:
- JSON.parse(aggregatedResponse)
- ) : aggregatedResponse
- )
- );
- return;
- } else if (typeof functionResponse === "string") {
- controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
- );
- return;
- }
- const filteredCallbacks = {
- ...callbacks,
- onStart: void 0
- };
- callbacks.onFinal = void 0;
- const openAIStream = OpenAIStream(functionResponse, {
- ...filteredCallbacks,
- [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
- });
- const reader = openAIStream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
- controller.enqueue(value);
+ }
+ });
+ }
+ function isOfTypeOpenAIStreamCallbacks(callbacks) {
+ return "experimental_onFunctionCall" in callbacks;
+ }
+ function trimStartOfStreamHelper() {
+ let isStreamStart = true;
+ return (text) => {
+ if (isStreamStart) {
+ text = text.trimStart();
+ if (text)
+ isStreamStart = false;
+ }
+ return text;
+ };
+ }
+ function AIStream(response, customParser, callbacks) {
+ if (!response.ok) {
+ if (response.body) {
+ const reader = response.body.getReader();
+ return new ReadableStream({
+ async start(controller) {
+ const { done, value } = await reader.read();
+ if (!done) {
+ const errorText = new TextDecoder().decode(value);
+ controller.error(new Error(`Response error: ${errorText}`));
  }
  }
- } finally {
- if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
- await callbacks.onFinal(aggregatedFinalCompletionResponse);
+ });
+ } else {
+ return new ReadableStream({
+ start(controller) {
+ controller.error(new Error("Response error: No response body"));
  }
- }
+ });
+ }
+ }
+ const responseBodyStream = response.body || createEmptyReadableStream();
+ return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
+ }
+ function createEmptyReadableStream() {
+ return new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ }
+ function readableFromAsyncIterable(iterable) {
+ let it = iterable[Symbol.asyncIterator]();
+ return new ReadableStream({
+ async pull(controller) {
+ const { done, value } = await it.next();
+ if (done)
+ controller.close();
+ else
+ controller.enqueue(value);
+ },
+ async cancel(reason) {
+ var _a;
+ await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
  }
  });
  }

- // streams/streaming-text-response.ts
- var StreamingTextResponse = class extends Response {
- constructor(res, init, data) {
- let processedStream = res;
- if (data) {
- processedStream = res.pipeThrough(data.stream);
- }
- super(processedStream, {
- ...init,
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8",
- [COMPLEX_HEADER]: data ? "true" : "false",
- ...init == null ? void 0 : init.headers
+ // streams/stream-data.ts
+ var experimental_StreamData = class {
+ constructor() {
+ this.encoder = new TextEncoder();
+ this.controller = null;
+ // closing the stream is synchronous, but we want to return a promise
+ // in case we're doing async work
+ this.isClosedPromise = null;
+ this.isClosedPromiseResolver = void 0;
+ this.isClosed = false;
+ // array to store appended data
+ this.data = [];
+ this.messageAnnotations = [];
+ this.isClosedPromise = new Promise((resolve) => {
+ this.isClosedPromiseResolver = resolve;
+ });
+ const self = this;
+ this.stream = new TransformStream({
+ start: async (controller) => {
+ self.controller = controller;
+ },
+ transform: async (chunk, controller) => {
+ if (self.data.length > 0) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ self.data = [];
+ controller.enqueue(encodedData);
+ }
+ if (self.messageAnnotations.length) {
+ const encodedMessageAnnotations = self.encoder.encode(
+ formatStreamPart("message_annotations", self.messageAnnotations)
+ );
+ self.messageAnnotations = [];
+ controller.enqueue(encodedMessageAnnotations);
+ }
+ controller.enqueue(chunk);
+ },
+ async flush(controller) {
+ const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
+ console.warn(
+ "The data stream is hanging. Did you forget to close it with `data.close()`?"
+ );
+ }, 3e3) : null;
+ await self.isClosedPromise;
+ if (warningTimeout !== null) {
+ clearTimeout(warningTimeout);
+ }
+ if (self.data.length) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ controller.enqueue(encodedData);
+ }
+ if (self.messageAnnotations.length) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("message_annotations", self.messageAnnotations)
+ );
+ controller.enqueue(encodedData);
+ }
  }
  });
  }
+ async close() {
+ var _a;
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
+ }
+ (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
+ this.isClosed = true;
+ }
+ append(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ this.data.push(value);
+ }
+ appendMessageAnnotation(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ this.messageAnnotations.push(value);
+ }
  };
- function streamToResponse(res, response, init) {
- response.writeHead((init == null ? void 0 : init.status) || 200, {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
- });
- const reader = res.getReader();
- function read() {
- reader.read().then(({ done, value }) => {
- if (done) {
- response.end();
- return;
+ function createStreamDataTransformer(experimental_streamData) {
+ if (!experimental_streamData) {
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ controller.enqueue(chunk);
  }
- response.write(value);
- read();
  });
  }
- read();
+ const encoder = new TextEncoder();
+ const decoder = new TextDecoder();
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ const message = decoder.decode(chunk);
+ controller.enqueue(encoder.encode(formatStreamPart("text", message)));
+ }
+ });
  }

- // streams/huggingface-stream.ts
- function createParser2(res) {
- const trimStartOfStream = trimStartOfStreamHelper();
- return new ReadableStream({
- async pull(controller) {
- var _a, _b;
- const { value, done } = await res.next();
- if (done) {
- controller.close();
- return;
+ // streams/anthropic-stream.ts
+ function parseAnthropicStream() {
+ let previous = "";
+ return (data) => {
+ const json = JSON.parse(data);
+ if ("error" in json) {
+ throw new Error(`${json.error.type}: ${json.error.message}`);
+ }
+ if (!("completion" in json)) {
+ return;
+ }
+ const text = json.completion;
+ if (!previous || text.length > previous.length && text.startsWith(previous)) {
+ const delta = text.slice(previous.length);
+ previous = text;
+ return delta;
+ }
+ return text;
+ };
+ }
+ async function* streamable(stream) {
+ for await (const chunk of stream) {
+ if ("completion" in chunk) {
+ const text = chunk.completion;
+ if (text)
+ yield text;
+ } else if ("delta" in chunk) {
+ const { delta } = chunk;
+ if ("text" in delta) {
+ const text = delta.text;
+ if (text)
+ yield text;
  }
- const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
- if (!text)
- return;
- if (value.generated_text != null && value.generated_text.length > 0) {
- return;
+ }
+ }
+ }
+ function AnthropicStream(res, cb) {
+ if (Symbol.asyncIterator in res) {
+ return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+ } else {
+ return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+ );
+ }
+ }
+
+ // streams/assistant-response.ts
+ function experimental_AssistantResponse({ threadId, messageId }, process2) {
+ const stream = new ReadableStream({
+ async start(controller) {
+ var _a;
+ const textEncoder = new TextEncoder();
+ const sendMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("assistant_message", message))
+ );
+ };
+ const sendDataMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("data_message", message))
+ );
+ };
+ const sendError = (errorMessage) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("error", errorMessage))
+ );
+ };
+ controller.enqueue(
+ textEncoder.encode(
+ formatStreamPart("assistant_control_data", {
+ threadId,
+ messageId
+ })
+ )
+ );
+ try {
+ await process2({
+ threadId,
+ messageId,
+ sendMessage,
+ sendDataMessage
+ });
+ } catch (error) {
+ sendError((_a = error.message) != null ? _a : `${error}`);
+ } finally {
+ controller.close();
  }
- if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
- return;
+ },
+ pull(controller) {
+ },
+ cancel() {
+ }
+ });
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8"
+ }
+ });
+ }
+
+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
  }
- controller.enqueue(text);
  }
- });
+ }
  }
- function HuggingFaceStream(res, callbacks) {
- return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
+ var _a, _b;
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+ }
+ );
+ }
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
@@ -912,7 +649,7 @@ async function readAndProcessLines(reader, controller) {
  }
  controller.close();
  }
- function createParser3(res) {
+ function createParser2(res) {
  var _a;
  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
  return new ReadableStream({
@@ -940,56 +677,58 @@ function CohereStream(reader, callbacks) {
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  } else {
- return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
  }

- // streams/anthropic-stream.ts
- function parseAnthropicStream() {
- let previous = "";
- return (data) => {
- const json = JSON.parse(data);
- if ("error" in json) {
- throw new Error(`${json.error.type}: ${json.error.message}`);
- }
- if (!("completion" in json)) {
- return;
+ // streams/google-generative-ai-stream.ts
+ async function* streamable3(response) {
+ var _a, _b, _c;
+ for await (const chunk of response.stream) {
+ const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+ if (parts === void 0) {
+ continue;
  }
- const text = json.completion;
- if (!previous || text.length > previous.length && text.startsWith(previous)) {
- const delta = text.slice(previous.length);
- previous = text;
- return delta;
+ const firstPart = parts[0];
+ if (typeof firstPart.text === "string") {
+ yield firstPart.text;
  }
- return text;
- };
+ }
  }
- async function* streamable3(stream) {
- for await (const chunk of stream) {
- if ("completion" in chunk) {
- const text = chunk.completion;
- if (text)
- yield text;
- } else if ("delta" in chunk) {
- const { delta } = chunk;
- if ("text" in delta) {
- const text = delta.text;
- if (text)
- yield text;
+ function GoogleGenerativeAIStream(response, cb) {
+ return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+ }
+
+ // streams/huggingface-stream.ts
+ function createParser3(res) {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ return new ReadableStream({
+ async pull(controller) {
+ var _a, _b;
+ const { value, done } = await res.next();
+ if (done) {
+ controller.close();
+ return;
+ }
+ const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+ if (!text)
+ return;
+ if (value.generated_text != null && value.generated_text.length > 0) {
+ return;
+ }
+ if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+ return;
  }
+ controller.enqueue(text);
  }
- }
+ });
  }
- function AnthropicStream(res, cb) {
- if (Symbol.asyncIterator in res) {
- return readableFromAsyncIterable(streamable3(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
- } else {
- return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
- );
- }
+ function HuggingFaceStream(res, callbacks) {
+ return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
  }

  // streams/inkeep-stream.ts
@@ -1088,7 +827,307 @@ function LangChainStream(callbacks) {
  await handleError(e, runId);
  }
  }
- };
+ };
+ }
+
+ // streams/openai-stream.ts
+ function parseOpenAIStream() {
+ const extract = chunkToText();
+ return (data) => extract(JSON.parse(data));
+ }
+ async function* streamable4(stream) {
+ const extract = chunkToText();
+ for await (let chunk of stream) {
+ if ("promptFilterResults" in chunk) {
+ chunk = {
+ id: chunk.id,
+ created: chunk.created.getDate(),
+ object: chunk.object,
+ // not exposed by Azure API
+ model: chunk.model,
+ // not exposed by Azure API
+ choices: chunk.choices.map((choice) => {
+ var _a, _b, _c, _d, _e, _f, _g;
+ return {
+ delta: {
+ content: (_a = choice.delta) == null ? void 0 : _a.content,
+ function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
+ role: (_c = choice.delta) == null ? void 0 : _c.role,
+ tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
+ index,
+ id: toolCall.id,
+ function: toolCall.function,
+ type: toolCall.type
+ })) : void 0
+ },
+ finish_reason: choice.finishReason,
+ index: choice.index
+ };
+ })
+ };
+ }
+ const text = extract(chunk);
+ if (text)
+ yield text;
+ }
+ }
+ function chunkToText() {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ let isFunctionStreamingIn;
+ return (json) => {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+ if (isChatCompletionChunk(json)) {
+ const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
+ if ((_b = delta.function_call) == null ? void 0 : _b.name) {
+ isFunctionStreamingIn = true;
+ return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
+ } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
+ isFunctionStreamingIn = true;
+ const toolCall = delta.tool_calls[0];
+ if (toolCall.index === 0) {
+ return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
+ } else {
+ return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
+ }
+ } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
+ return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
+ } else if ((_k = (_j = delta.tool_calls) == null ? void 0 : _j[0].function) == null ? void 0 : _k.arguments) {
+ return cleanupArguments((_n = (_m = (_l = delta.tool_calls) == null ? void 0 : _l[0]) == null ? void 0 : _m.function) == null ? void 0 : _n.arguments);
+ } else if (isFunctionStreamingIn && (((_o = json.choices[0]) == null ? void 0 : _o.finish_reason) === "function_call" || ((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "stop")) {
+ isFunctionStreamingIn = false;
+ return '"}}';
+ } else if (isFunctionStreamingIn && ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "tool_calls") {
+ isFunctionStreamingIn = false;
+ return '"}}]}';
+ }
+ }
+ const text = trimStartOfStream(
+ isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
+ );
+ return text;
+ };
+ function cleanupArguments(argumentChunk) {
+ let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+ return `${escapedPartialJson}`;
+ }
+ }
+ var __internal__OpenAIFnMessagesSymbol = Symbol(
+ "internal_openai_fn_messages"
+ );
+ function isChatCompletionChunk(data) {
+ return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
+ }
+ function isCompletion(data) {
+ return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
+ }
+ function OpenAIStream(res, callbacks) {
+ const cb = callbacks;
+ let stream;
+ if (Symbol.asyncIterator in res) {
+ stream = readableFromAsyncIterable(streamable4(res)).pipeThrough(
+ createCallbacksTransformer(
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ )
+ );
+ } else {
+ stream = AIStream(
+ res,
+ parseOpenAIStream(),
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ );
+ }
+ if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
+ const functionCallTransformer = createFunctionCallTransformer(cb);
+ return stream.pipeThrough(functionCallTransformer);
+ } else {
+ return stream.pipeThrough(
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+ );
+ }
+ }
+ function createFunctionCallTransformer(callbacks) {
+ const textEncoder = new TextEncoder();
+ let isFirstChunk = true;
+ let aggregatedResponse = "";
+ let aggregatedFinalCompletionResponse = "";
+ let isFunctionStreamingIn = false;
+ let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+ const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
+ const decode = createChunkDecoder();
+ return new TransformStream({
+ async transform(chunk, controller) {
+ const message = decode(chunk);
+ aggregatedFinalCompletionResponse += message;
+ const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
+ if (shouldHandleAsFunction) {
+ isFunctionStreamingIn = true;
+ aggregatedResponse += message;
+ isFirstChunk = false;
+ return;
+ }
+ if (!isFunctionStreamingIn) {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+ );
+ return;
+ } else {
+ aggregatedResponse += message;
+ }
+ },
+ async flush(controller) {
+ try {
+ if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
+ isFunctionStreamingIn = false;
+ const payload = JSON.parse(aggregatedResponse);
+ let newFunctionCallMessages = [
+ ...functionCallMessages
+ ];
+ let functionResponse = void 0;
+ if (callbacks.experimental_onFunctionCall) {
+ if (payload.function_call === void 0) {
+ console.warn(
+ "experimental_onFunctionCall should not be defined when using tools"
+ );
+ }
+ const argumentsPayload = JSON.parse(
+ payload.function_call.arguments
+ );
+ functionResponse = await callbacks.experimental_onFunctionCall(
+ {
+ name: payload.function_call.name,
+ arguments: argumentsPayload
+ },
+ (result) => {
+ newFunctionCallMessages = [
+ ...functionCallMessages,
+ {
+ role: "assistant",
+ content: "",
+ function_call: payload.function_call
+ },
+ {
+ role: "function",
+ name: payload.function_call.name,
+ content: JSON.stringify(result)
+ }
+ ];
+ return newFunctionCallMessages;
+ }
+ );
+ }
+ if (callbacks.experimental_onToolCall) {
+ const toolCalls = {
+ tools: []
+ };
+ for (const tool of payload.tool_calls) {
+ toolCalls.tools.push({
+ id: tool.id,
+ type: "function",
+ func: {
+ name: tool.function.name,
+ arguments: tool.function.arguments
+ }
+ });
+ }
+ let responseIndex = 0;
+ try {
+ functionResponse = await callbacks.experimental_onToolCall(
+ toolCalls,
+ (result) => {
+ if (result) {
+ const { tool_call_id, function_name, tool_call_result } = result;
+ newFunctionCallMessages = [
+ ...newFunctionCallMessages,
+ // Only append the assistant message if it's the first response
+ ...responseIndex === 0 ? [
+ {
+ role: "assistant",
+ content: "",
+ tool_calls: payload.tool_calls.map(
+ (tc) => ({
+ id: tc.id,
+ type: "function",
+ function: {
+ name: tc.function.name,
+ // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
+ arguments: JSON.stringify(
+ tc.function.arguments
+ )
+ }
+ })
+ )
+ }
+ ] : [],
+ // Append the function call result message
+ {
+ role: "tool",
+ tool_call_id,
+ name: function_name,
+ content: JSON.stringify(tool_call_result)
+ }
+ ];
+ responseIndex++;
+ }
+ return newFunctionCallMessages;
+ }
+ );
+ } catch (e) {
+ console.error("Error calling experimental_onToolCall:", e);
+ }
+ }
+ if (!functionResponse) {
+ controller.enqueue(
+ textEncoder.encode(
+ isComplexMode ? formatStreamPart(
+ payload.function_call ? "function_call" : "tool_calls",
+ // parse to prevent double-encoding:
+ JSON.parse(aggregatedResponse)
+ ) : aggregatedResponse
+ )
+ );
+ return;
+ } else if (typeof functionResponse === "string") {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
+ );
+ aggregatedFinalCompletionResponse = functionResponse;
+ return;
+ }
+ const filteredCallbacks = {
+ ...callbacks,
+ onStart: void 0
+ };
+ callbacks.onFinal = void 0;
+ const openAIStream = OpenAIStream(functionResponse, {
+ ...filteredCallbacks,
+ [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+ });
+ const reader = openAIStream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ controller.enqueue(value);
+ }
+ }
+ } finally {
+ if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
+ await callbacks.onFinal(aggregatedFinalCompletionResponse);
+ }
+ }
+ }
+ });
  }

  // streams/replicate-stream.ts
@@ -1113,79 +1152,6 @@ async function ReplicateStream(res, cb, options) {
  );
  }

- // streams/assistant-response.ts
- function experimental_AssistantResponse({ threadId, messageId }, process2) {
- const stream = new ReadableStream({
- async start(controller) {
- var _a;
- const textEncoder = new TextEncoder();
- const sendMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("assistant_message", message))
- );
- };
- const sendDataMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("data_message", message))
- );
- };
- const sendError = (errorMessage) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("error", errorMessage))
- );
- };
- controller.enqueue(
- textEncoder.encode(
- formatStreamPart("assistant_control_data", {
- threadId,
- messageId
- })
- )
- );
- try {
- await process2({
- threadId,
- messageId,
- sendMessage,
- sendDataMessage
- });
- } catch (error) {
- sendError((_a = error.message) != null ? _a : `${error}`);
- } finally {
- controller.close();
- }
- },
- pull(controller) {
- },
- cancel() {
- }
- });
- return new Response(stream, {
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8"
- }
- });
- }
-
- // streams/google-generative-ai-stream.ts
- async function* streamable4(response) {
- var _a, _b, _c;
- for await (const chunk of response.stream) {
- const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
- if (parts === void 0) {
- continue;
- }
- const firstPart = parts[0];
- if (typeof firstPart.text === "string") {
- yield firstPart.text;
- }
- }
- }
- function GoogleGenerativeAIStream(response, cb) {
- return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
- }
-
  // shared/read-data-stream.ts
  var NEWLINE = "\n".charCodeAt(0);
  function concatChunks(chunks, totalLength) {
@@ -1414,6 +1380,43 @@ var experimental_StreamingReactResponse = class {
  return next;
  }
  };
+
+ // streams/streaming-text-response.ts
+ var StreamingTextResponse = class extends Response {
+ constructor(res, init, data) {
+ let processedStream = res;
+ if (data) {
+ processedStream = res.pipeThrough(data.stream);
+ }
+ super(processedStream, {
+ ...init,
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ [COMPLEX_HEADER]: data ? "true" : "false",
+ ...init == null ? void 0 : init.headers
+ }
+ });
+ }
+ };
+ function streamToResponse(res, response, init) {
+ response.writeHead((init == null ? void 0 : init.status) || 200, {
+ "Content-Type": "text/plain; charset=utf-8",
+ ...init == null ? void 0 : init.headers
+ });
+ const reader = res.getReader();
+ function read() {
+ reader.read().then(({ done, value }) => {
+ if (done) {
+ response.end();
+ return;
+ }
+ response.write(value);
+ read();
+ });
+ }
+ read();
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AIStream,