ai 2.2.35 → 2.2.37

This diff represents the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between those versions as published.
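The headline change in this range is the new `streams/mistral-stream.ts` module and its `MistralStream` export; the rest is largely a reordering of the bundled modules, a `streamable` → `streamable5` rename in the OpenAI stream, and a fix that records string function responses in `aggregatedFinalCompletionResponse`. A minimal usage sketch for the new helper — assuming the `@mistralai/mistralai` client and a fetch-style route handler, neither of which is part of this diff:

// Hypothetical route handler; the client setup is an assumption, not from the diff.
import MistralClient from '@mistralai/mistralai';
import { MistralStream, StreamingTextResponse } from 'ai';

const mistral = new MistralClient(process.env.MISTRAL_API_KEY ?? '');

export async function POST(req: Request) {
  const { messages } = await req.json();
  // chatStream yields chunks shaped like { choices: [{ delta: { content } }] },
  // which is what the diff's streamable4 generator reads.
  const response = mistral.chatStream({ model: 'mistral-tiny', messages });
  // MistralStream pipes the content deltas through the callbacks and
  // stream-data transformers and returns a ReadableStream of text.
  const stream = MistralStream(response);
  return new StreamingTextResponse(stream);
}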
package/dist/index.mjs CHANGED
@@ -1,122 +1,5 @@
- // streams/ai-stream.ts
- import {
- createParser
- } from "eventsource-parser";
- function createEventStreamTransformer(customParser) {
- const textDecoder = new TextDecoder();
- let eventSourceParser;
- return new TransformStream({
- async start(controller) {
- eventSourceParser = createParser(
- (event) => {
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
- // @see https://replicate.com/docs/streaming
- event.event === "done") {
- controller.terminate();
- return;
- }
- if ("data" in event) {
- const parsedMessage = customParser ? customParser(event.data, {
- event: event.event
- }) : event.data;
- if (parsedMessage)
- controller.enqueue(parsedMessage);
- }
- }
- );
- },
- transform(chunk) {
- eventSourceParser.feed(textDecoder.decode(chunk));
- }
- });
- }
- function createCallbacksTransformer(cb) {
- const textEncoder = new TextEncoder();
- let aggregatedResponse = "";
- const callbacks = cb || {};
- return new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
- async transform(message, controller) {
- controller.enqueue(textEncoder.encode(message));
- aggregatedResponse += message;
- if (callbacks.onToken)
- await callbacks.onToken(message);
- },
- async flush() {
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
- if (callbacks.onCompletion) {
- await callbacks.onCompletion(aggregatedResponse);
- }
- if (callbacks.onFinal && !isOpenAICallbacks) {
- await callbacks.onFinal(aggregatedResponse);
- }
- }
- });
- }
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
- return "experimental_onFunctionCall" in callbacks;
- }
- function trimStartOfStreamHelper() {
- let isStreamStart = true;
- return (text) => {
- if (isStreamStart) {
- text = text.trimStart();
- if (text)
- isStreamStart = false;
- }
- return text;
- };
- }
- function AIStream(response, customParser, callbacks) {
- if (!response.ok) {
- if (response.body) {
- const reader = response.body.getReader();
- return new ReadableStream({
- async start(controller) {
- const { done, value } = await reader.read();
- if (!done) {
- const errorText = new TextDecoder().decode(value);
- controller.error(new Error(`Response error: ${errorText}`));
- }
- }
- });
- } else {
- return new ReadableStream({
- start(controller) {
- controller.error(new Error("Response error: No response body"));
- }
- });
- }
- }
- const responseBodyStream = response.body || createEmptyReadableStream();
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
- function createEmptyReadableStream() {
- return new ReadableStream({
- start(controller) {
- controller.close();
- }
- });
- }
- function readableFromAsyncIterable(iterable) {
- let it = iterable[Symbol.asyncIterator]();
- return new ReadableStream({
- async pull(controller) {
- const { done, value } = await it.next();
- if (done)
- controller.close();
- else
- controller.enqueue(value);
- },
- async cancel(reason) {
- var _a;
- await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
- }
- });
- }
+ // shared/utils.ts
+ import { customAlphabet } from "nanoid/non-secure";

  // shared/stream-parts.ts
  var textStreamPart = {
@@ -298,538 +181,392 @@ function formatStreamPart(type, value) {
  `;
  }

- // streams/stream-data.ts
- var experimental_StreamData = class {
- constructor() {
- this.encoder = new TextEncoder();
- this.controller = null;
- // closing the stream is synchronous, but we want to return a promise
- // in case we're doing async work
- this.isClosedPromise = null;
- this.isClosedPromiseResolver = void 0;
- this.isClosed = false;
- // array to store appended data
- this.data = [];
- this.messageAnnotations = [];
- this.isClosedPromise = new Promise((resolve) => {
- this.isClosedPromiseResolver = resolve;
- });
- const self = this;
- this.stream = new TransformStream({
- start: async (controller) => {
- self.controller = controller;
- },
- transform: async (chunk, controller) => {
- if (self.data.length > 0) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- self.data = [];
- controller.enqueue(encodedData);
- }
- if (self.messageAnnotations.length) {
- const encodedMessageAnnotations = self.encoder.encode(
- formatStreamPart("message_annotations", self.messageAnnotations)
- );
- self.messageAnnotations = [];
- controller.enqueue(encodedMessageAnnotations);
- }
- controller.enqueue(chunk);
- },
- async flush(controller) {
- const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
- console.warn(
- "The data stream is hanging. Did you forget to close it with `data.close()`?"
- );
- }, 3e3) : null;
- await self.isClosedPromise;
- if (warningTimeout !== null) {
- clearTimeout(warningTimeout);
- }
- if (self.data.length) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- controller.enqueue(encodedData);
- }
- if (self.messageAnnotations.length) {
- const encodedData = self.encoder.encode(
- formatStreamPart("message_annotations", self.messageAnnotations)
- );
- controller.enqueue(encodedData);
- }
- }
- });
- }
- async close() {
- var _a;
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
- this.isClosed = true;
- }
- append(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- this.data.push(value);
- }
- appendMessageAnnotation(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- this.messageAnnotations.push(value);
- }
- };
- function createStreamDataTransformer(experimental_streamData) {
- if (!experimental_streamData) {
- return new TransformStream({
- transform: async (chunk, controller) => {
- controller.enqueue(chunk);
- }
- });
- }
- const encoder = new TextEncoder();
- const decoder = new TextDecoder();
- return new TransformStream({
- transform: async (chunk, controller) => {
- const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode(formatStreamPart("text", message)));
- }
- });
- }
-
- // streams/aws-bedrock-stream.ts
- async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a, _b;
+ // shared/utils.ts
+ var nanoid = customAlphabet(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder(complex) {
  const decoder = new TextDecoder();
- for await (const chunk of (_a = response.body) != null ? _a : []) {
- const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
- if (bytes != null) {
- const chunkText = decoder.decode(bytes);
- const chunkJSON = JSON.parse(chunkText);
- const delta = extractTextDeltaFromChunk(chunkJSON);
- if (delta != null) {
- yield delta;
- }
- }
+ if (!complex) {
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
  }
- }
- function AWSBedrockAnthropicStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
- }
- function AWSBedrockCohereStream(response, callbacks) {
- return AWSBedrockStream(
- response,
- callbacks,
- // As of 2023-11-17, Bedrock does not support streaming for Cohere,
- // so we take the full generation:
- (chunk) => {
- var _a, _b;
- return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
- }
- );
- }
- function AWSBedrockLlama2Stream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
- }
- function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
- return readableFromAsyncIterable(
- asDeltaIterable(response, extractTextDeltaFromChunk)
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
- );
- }
-
- // shared/utils.ts
- import { customAlphabet } from "nanoid/non-secure";
- var nanoid = customAlphabet(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
- function createChunkDecoder(complex) {
- const decoder = new TextDecoder();
- if (!complex) {
- return function(chunk) {
- if (!chunk)
- return "";
- return decoder.decode(chunk, { stream: true });
- };
- }
- return function(chunk) {
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
- return decoded.map(parseStreamPart).filter(Boolean);
- };
+ return function(chunk) {
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+ return decoded.map(parseStreamPart).filter(Boolean);
+ };
  }
  var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";

- // streams/openai-stream.ts
- function parseOpenAIStream() {
- const extract = chunkToText();
- return (data) => extract(JSON.parse(data));
- }
- async function* streamable(stream) {
- const extract = chunkToText();
- for await (let chunk of stream) {
- if ("promptFilterResults" in chunk) {
- chunk = {
- id: chunk.id,
- created: chunk.created.getDate(),
- object: chunk.object,
- // not exposed by Azure API
- model: chunk.model,
- // not exposed by Azure API
- choices: chunk.choices.map((choice) => {
- var _a, _b, _c, _d, _e, _f, _g;
- return {
- delta: {
- content: (_a = choice.delta) == null ? void 0 : _a.content,
- function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
- role: (_c = choice.delta) == null ? void 0 : _c.role,
- tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
- index,
- id: toolCall.id,
- function: toolCall.function,
- type: toolCall.type
- })) : void 0
- },
- finish_reason: choice.finishReason,
- index: choice.index
- };
- })
- };
- }
- const text = extract(chunk);
- if (text)
- yield text;
- }
- }
- function chunkToText() {
- const trimStartOfStream = trimStartOfStreamHelper();
- let isFunctionStreamingIn;
- return (json) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
- if (isChatCompletionChunk(json)) {
- const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
- if ((_b = delta.function_call) == null ? void 0 : _b.name) {
- isFunctionStreamingIn = true;
- return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
- } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
- isFunctionStreamingIn = true;
- const toolCall = delta.tool_calls[0];
- if (toolCall.index === 0) {
- return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
- } else {
- return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
+ // streams/ai-stream.ts
+ import {
+ createParser
+ } from "eventsource-parser";
+ function createEventStreamTransformer(customParser) {
+ const textDecoder = new TextDecoder();
+ let eventSourceParser;
+ return new TransformStream({
+ async start(controller) {
+ eventSourceParser = createParser(
+ (event) => {
+ if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+ // @see https://replicate.com/docs/streaming
+ event.event === "done") {
+ controller.terminate();
+ return;
+ }
+ if ("data" in event) {
+ const parsedMessage = customParser ? customParser(event.data, {
+ event: event.event
+ }) : event.data;
+ if (parsedMessage)
+ controller.enqueue(parsedMessage);
+ }
  }
- } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
- return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
- } else if ((_k = (_j = delta.tool_calls) == null ? void 0 : _j[0].function) == null ? void 0 : _k.arguments) {
- return cleanupArguments((_n = (_m = (_l = delta.tool_calls) == null ? void 0 : _l[0]) == null ? void 0 : _m.function) == null ? void 0 : _n.arguments);
- } else if (isFunctionStreamingIn && (((_o = json.choices[0]) == null ? void 0 : _o.finish_reason) === "function_call" || ((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "stop")) {
- isFunctionStreamingIn = false;
- return '"}}';
- } else if (isFunctionStreamingIn && ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "tool_calls") {
- isFunctionStreamingIn = false;
- return '"}}]}';
- }
+ );
+ },
+ transform(chunk) {
+ eventSourceParser.feed(textDecoder.decode(chunk));
  }
- const text = trimStartOfStream(
- isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
- );
- return text;
- };
- function cleanupArguments(argumentChunk) {
- let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
- return `${escapedPartialJson}`;
- }
- }
- var __internal__OpenAIFnMessagesSymbol = Symbol(
- "internal_openai_fn_messages"
- );
- function isChatCompletionChunk(data) {
- return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
- }
- function isCompletion(data) {
- return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
- }
- function OpenAIStream(res, callbacks) {
- const cb = callbacks;
- let stream;
- if (Symbol.asyncIterator in res) {
- stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
- createCallbacksTransformer(
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- )
- );
- } else {
- stream = AIStream(
- res,
- parseOpenAIStream(),
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- );
- }
- if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
- const functionCallTransformer = createFunctionCallTransformer(cb);
- return stream.pipeThrough(functionCallTransformer);
- } else {
- return stream.pipeThrough(
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
- );
- }
+ });
  }
- function createFunctionCallTransformer(callbacks) {
+ function createCallbacksTransformer(cb) {
  const textEncoder = new TextEncoder();
- let isFirstChunk = true;
  let aggregatedResponse = "";
- let aggregatedFinalCompletionResponse = "";
- let isFunctionStreamingIn = false;
- let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
- const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
- const decode = createChunkDecoder();
+ const callbacks = cb || {};
  return new TransformStream({
- async transform(chunk, controller) {
- const message = decode(chunk);
- aggregatedFinalCompletionResponse += message;
- const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
- if (shouldHandleAsFunction) {
- isFunctionStreamingIn = true;
- aggregatedResponse += message;
- isFirstChunk = false;
- return;
+ async start() {
+ if (callbacks.onStart)
+ await callbacks.onStart();
+ },
+ async transform(message, controller) {
+ controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
+ if (callbacks.onToken)
+ await callbacks.onToken(message);
+ },
+ async flush() {
+ const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+ if (callbacks.onCompletion) {
+ await callbacks.onCompletion(aggregatedResponse);
  }
- if (!isFunctionStreamingIn) {
- controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
- );
- return;
- } else {
- aggregatedResponse += message;
+ if (callbacks.onFinal && !isOpenAICallbacks) {
+ await callbacks.onFinal(aggregatedResponse);
  }
- },
- async flush(controller) {
- try {
- if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
- isFunctionStreamingIn = false;
- const payload = JSON.parse(aggregatedResponse);
- let newFunctionCallMessages = [
- ...functionCallMessages
- ];
- let functionResponse = void 0;
- if (callbacks.experimental_onFunctionCall) {
- if (payload.function_call === void 0) {
- console.warn(
- "experimental_onFunctionCall should not be defined when using tools"
- );
- }
- const argumentsPayload = JSON.parse(
- payload.function_call.arguments
- );
- functionResponse = await callbacks.experimental_onFunctionCall(
- {
- name: payload.function_call.name,
- arguments: argumentsPayload
- },
- (result) => {
- newFunctionCallMessages = [
- ...functionCallMessages,
- {
- role: "assistant",
- content: "",
- function_call: payload.function_call
- },
- {
- role: "function",
- name: payload.function_call.name,
- content: JSON.stringify(result)
- }
- ];
- return newFunctionCallMessages;
- }
- );
- }
- if (callbacks.experimental_onToolCall) {
- const toolCalls = {
- tools: []
- };
- for (const tool of payload.tool_calls) {
- toolCalls.tools.push({
- id: tool.id,
- type: "function",
- func: {
- name: tool.function.name,
- arguments: tool.function.arguments
- }
- });
- }
- let responseIndex = 0;
- try {
- functionResponse = await callbacks.experimental_onToolCall(
- toolCalls,
- (result) => {
- if (result) {
- const { tool_call_id, function_name, tool_call_result } = result;
- newFunctionCallMessages = [
- ...newFunctionCallMessages,
- // Only append the assistant message if it's the first response
- ...responseIndex === 0 ? [
- {
- role: "assistant",
- content: "",
- tool_calls: payload.tool_calls.map(
- (tc) => ({
- id: tc.id,
- type: "function",
- function: {
- name: tc.function.name,
- // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
- arguments: JSON.stringify(
- tc.function.arguments
- )
- }
- })
- )
- }
- ] : [],
- // Append the function call result message
- {
- role: "tool",
- tool_call_id,
- name: function_name,
- content: JSON.stringify(tool_call_result)
- }
- ];
- responseIndex++;
- }
- return newFunctionCallMessages;
- }
- );
- } catch (e) {
- console.error("Error calling experimental_onToolCall:", e);
- }
- }
- if (!functionResponse) {
- controller.enqueue(
- textEncoder.encode(
- isComplexMode ? formatStreamPart(
- payload.function_call ? "function_call" : "tool_calls",
- // parse to prevent double-encoding:
- JSON.parse(aggregatedResponse)
- ) : aggregatedResponse
- )
- );
- return;
- } else if (typeof functionResponse === "string") {
- controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
- );
- return;
- }
- const filteredCallbacks = {
- ...callbacks,
- onStart: void 0
- };
- callbacks.onFinal = void 0;
- const openAIStream = OpenAIStream(functionResponse, {
- ...filteredCallbacks,
- [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
- });
- const reader = openAIStream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
- controller.enqueue(value);
+ }
+ });
+ }
+ function isOfTypeOpenAIStreamCallbacks(callbacks) {
+ return "experimental_onFunctionCall" in callbacks;
+ }
+ function trimStartOfStreamHelper() {
+ let isStreamStart = true;
+ return (text) => {
+ if (isStreamStart) {
+ text = text.trimStart();
+ if (text)
+ isStreamStart = false;
+ }
+ return text;
+ };
+ }
+ function AIStream(response, customParser, callbacks) {
+ if (!response.ok) {
+ if (response.body) {
+ const reader = response.body.getReader();
+ return new ReadableStream({
+ async start(controller) {
+ const { done, value } = await reader.read();
+ if (!done) {
+ const errorText = new TextDecoder().decode(value);
+ controller.error(new Error(`Response error: ${errorText}`));
  }
  }
- } finally {
- if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
- await callbacks.onFinal(aggregatedFinalCompletionResponse);
+ });
+ } else {
+ return new ReadableStream({
+ start(controller) {
+ controller.error(new Error("Response error: No response body"));
  }
- }
+ });
+ }
+ }
+ const responseBodyStream = response.body || createEmptyReadableStream();
+ return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
+ }
+ function createEmptyReadableStream() {
+ return new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ }
+ function readableFromAsyncIterable(iterable) {
+ let it = iterable[Symbol.asyncIterator]();
+ return new ReadableStream({
+ async pull(controller) {
+ const { done, value } = await it.next();
+ if (done)
+ controller.close();
+ else
+ controller.enqueue(value);
+ },
+ async cancel(reason) {
+ var _a;
+ await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
  }
  });
  }

- // streams/streaming-text-response.ts
- var StreamingTextResponse = class extends Response {
- constructor(res, init, data) {
- let processedStream = res;
- if (data) {
- processedStream = res.pipeThrough(data.stream);
- }
- super(processedStream, {
- ...init,
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8",
- [COMPLEX_HEADER]: data ? "true" : "false",
- ...init == null ? void 0 : init.headers
+ // streams/stream-data.ts
+ var experimental_StreamData = class {
+ constructor() {
+ this.encoder = new TextEncoder();
+ this.controller = null;
+ // closing the stream is synchronous, but we want to return a promise
+ // in case we're doing async work
+ this.isClosedPromise = null;
+ this.isClosedPromiseResolver = void 0;
+ this.isClosed = false;
+ // array to store appended data
+ this.data = [];
+ this.messageAnnotations = [];
+ this.isClosedPromise = new Promise((resolve) => {
+ this.isClosedPromiseResolver = resolve;
+ });
+ const self = this;
+ this.stream = new TransformStream({
+ start: async (controller) => {
+ self.controller = controller;
+ },
+ transform: async (chunk, controller) => {
+ if (self.data.length > 0) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ self.data = [];
+ controller.enqueue(encodedData);
+ }
+ if (self.messageAnnotations.length) {
+ const encodedMessageAnnotations = self.encoder.encode(
+ formatStreamPart("message_annotations", self.messageAnnotations)
+ );
+ self.messageAnnotations = [];
+ controller.enqueue(encodedMessageAnnotations);
+ }
+ controller.enqueue(chunk);
+ },
+ async flush(controller) {
+ const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
+ console.warn(
+ "The data stream is hanging. Did you forget to close it with `data.close()`?"
+ );
+ }, 3e3) : null;
+ await self.isClosedPromise;
+ if (warningTimeout !== null) {
+ clearTimeout(warningTimeout);
+ }
+ if (self.data.length) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ controller.enqueue(encodedData);
+ }
+ if (self.messageAnnotations.length) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("message_annotations", self.messageAnnotations)
+ );
+ controller.enqueue(encodedData);
+ }
  }
  });
  }
+ async close() {
+ var _a;
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
+ }
+ (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
+ this.isClosed = true;
+ }
+ append(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ this.data.push(value);
+ }
+ appendMessageAnnotation(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ this.messageAnnotations.push(value);
+ }
  };
- function streamToResponse(res, response, init) {
- response.writeHead((init == null ? void 0 : init.status) || 200, {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
- });
- const reader = res.getReader();
- function read() {
- reader.read().then(({ done, value }) => {
- if (done) {
- response.end();
- return;
+ function createStreamDataTransformer(experimental_streamData) {
+ if (!experimental_streamData) {
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ controller.enqueue(chunk);
  }
- response.write(value);
- read();
  });
  }
- read();
+ const encoder = new TextEncoder();
+ const decoder = new TextDecoder();
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ const message = decoder.decode(chunk);
+ controller.enqueue(encoder.encode(formatStreamPart("text", message)));
+ }
+ });
  }

- // streams/huggingface-stream.ts
- function createParser2(res) {
- const trimStartOfStream = trimStartOfStreamHelper();
- return new ReadableStream({
- async pull(controller) {
- var _a, _b;
- const { value, done } = await res.next();
- if (done) {
- controller.close();
- return;
+ // streams/anthropic-stream.ts
+ function parseAnthropicStream() {
+ let previous = "";
+ return (data) => {
+ const json = JSON.parse(data);
+ if ("error" in json) {
+ throw new Error(`${json.error.type}: ${json.error.message}`);
+ }
+ if (!("completion" in json)) {
+ return;
+ }
+ const text = json.completion;
+ if (!previous || text.length > previous.length && text.startsWith(previous)) {
+ const delta = text.slice(previous.length);
+ previous = text;
+ return delta;
+ }
+ return text;
+ };
+ }
+ async function* streamable(stream) {
+ for await (const chunk of stream) {
+ if ("completion" in chunk) {
+ const text = chunk.completion;
+ if (text)
+ yield text;
+ } else if ("delta" in chunk) {
+ const { delta } = chunk;
+ if ("text" in delta) {
+ const text = delta.text;
+ if (text)
+ yield text;
  }
- const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
- if (!text)
- return;
- if (value.generated_text != null && value.generated_text.length > 0) {
- return;
+ }
+ }
+ }
+ function AnthropicStream(res, cb) {
+ if (Symbol.asyncIterator in res) {
+ return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+ } else {
+ return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+ );
+ }
+ }
+
+ // streams/assistant-response.ts
+ function experimental_AssistantResponse({ threadId, messageId }, process2) {
+ const stream = new ReadableStream({
+ async start(controller) {
+ var _a;
+ const textEncoder = new TextEncoder();
+ const sendMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("assistant_message", message))
+ );
+ };
+ const sendDataMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("data_message", message))
+ );
+ };
+ const sendError = (errorMessage) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("error", errorMessage))
+ );
+ };
+ controller.enqueue(
+ textEncoder.encode(
+ formatStreamPart("assistant_control_data", {
+ threadId,
+ messageId
+ })
+ )
+ );
+ try {
+ await process2({
+ threadId,
+ messageId,
+ sendMessage,
+ sendDataMessage
+ });
+ } catch (error) {
+ sendError((_a = error.message) != null ? _a : `${error}`);
+ } finally {
+ controller.close();
  }
- if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
- return;
+ },
+ pull(controller) {
+ },
+ cancel() {
+ }
+ });
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8"
+ }
+ });
+ }
+
+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
  }
- controller.enqueue(text);
  }
- });
+ }
  }
- function HuggingFaceStream(res, callbacks) {
- return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
+ var _a, _b;
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+ }
+ );
+ }
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
@@ -862,7 +599,7 @@ async function readAndProcessLines(reader, controller) {
  }
  controller.close();
  }
- function createParser3(res) {
+ function createParser2(res) {
  var _a;
  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
  return new ReadableStream({
@@ -890,56 +627,58 @@ function CohereStream(reader, callbacks) {
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  } else {
- return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
  }

- // streams/anthropic-stream.ts
- function parseAnthropicStream() {
- let previous = "";
- return (data) => {
- const json = JSON.parse(data);
- if ("error" in json) {
- throw new Error(`${json.error.type}: ${json.error.message}`);
- }
- if (!("completion" in json)) {
- return;
+ // streams/google-generative-ai-stream.ts
+ async function* streamable3(response) {
+ var _a, _b, _c;
+ for await (const chunk of response.stream) {
+ const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+ if (parts === void 0) {
+ continue;
  }
- const text = json.completion;
- if (!previous || text.length > previous.length && text.startsWith(previous)) {
- const delta = text.slice(previous.length);
- previous = text;
- return delta;
+ const firstPart = parts[0];
+ if (typeof firstPart.text === "string") {
+ yield firstPart.text;
  }
- return text;
- };
+ }
  }
- async function* streamable3(stream) {
- for await (const chunk of stream) {
- if ("completion" in chunk) {
- const text = chunk.completion;
- if (text)
- yield text;
- } else if ("delta" in chunk) {
- const { delta } = chunk;
- if ("text" in delta) {
- const text = delta.text;
- if (text)
- yield text;
+ function GoogleGenerativeAIStream(response, cb) {
+ return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+ }
+
+ // streams/huggingface-stream.ts
+ function createParser3(res) {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ return new ReadableStream({
+ async pull(controller) {
+ var _a, _b;
+ const { value, done } = await res.next();
+ if (done) {
+ controller.close();
+ return;
+ }
+ const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+ if (!text)
+ return;
+ if (value.generated_text != null && value.generated_text.length > 0) {
+ return;
  }
+ if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+ return;
+ }
+ controller.enqueue(text);
  }
- }
+ });
  }
- function AnthropicStream(res, cb) {
- if (Symbol.asyncIterator in res) {
- return readableFromAsyncIterable(streamable3(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
- } else {
- return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
- );
- }
+ function HuggingFaceStream(res, callbacks) {
+ return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
  }

  // streams/inkeep-stream.ts
@@ -1038,7 +777,325 @@ function LangChainStream(callbacks) {
  await handleError(e, runId);
  }
  }
- };
+ };
+ }
+
+ // streams/mistral-stream.ts
+ async function* streamable4(stream) {
+ var _a, _b;
+ for await (const chunk of stream) {
+ const content = (_b = (_a = chunk.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content;
+ if (content === void 0 || content === "") {
+ continue;
+ }
+ yield content;
+ }
+ }
+ function MistralStream(response, callbacks) {
+ const stream = readableFromAsyncIterable(streamable4(response));
+ return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
+ }
+
+ // streams/openai-stream.ts
+ function parseOpenAIStream() {
+ const extract = chunkToText();
+ return (data) => extract(JSON.parse(data));
+ }
+ async function* streamable5(stream) {
+ const extract = chunkToText();
+ for await (let chunk of stream) {
+ if ("promptFilterResults" in chunk) {
+ chunk = {
+ id: chunk.id,
+ created: chunk.created.getDate(),
+ object: chunk.object,
+ // not exposed by Azure API
+ model: chunk.model,
+ // not exposed by Azure API
+ choices: chunk.choices.map((choice) => {
+ var _a, _b, _c, _d, _e, _f, _g;
+ return {
+ delta: {
+ content: (_a = choice.delta) == null ? void 0 : _a.content,
+ function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
+ role: (_c = choice.delta) == null ? void 0 : _c.role,
+ tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
+ index,
+ id: toolCall.id,
+ function: toolCall.function,
+ type: toolCall.type
+ })) : void 0
+ },
+ finish_reason: choice.finishReason,
+ index: choice.index
+ };
+ })
+ };
+ }
+ const text = extract(chunk);
+ if (text)
+ yield text;
+ }
+ }
+ function chunkToText() {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ let isFunctionStreamingIn;
+ return (json) => {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+ if (isChatCompletionChunk(json)) {
+ const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
+ if ((_b = delta.function_call) == null ? void 0 : _b.name) {
+ isFunctionStreamingIn = true;
+ return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
+ } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
+ isFunctionStreamingIn = true;
+ const toolCall = delta.tool_calls[0];
+ if (toolCall.index === 0) {
+ return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
+ } else {
+ return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
+ }
+ } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
+ return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
+ } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
+ return cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments);
+ } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
+ isFunctionStreamingIn = false;
+ return '"}}';
+ } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
+ isFunctionStreamingIn = false;
+ return '"}}]}';
+ }
+ }
+ const text = trimStartOfStream(
+ isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
+ );
+ return text;
+ };
+ function cleanupArguments(argumentChunk) {
+ let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+ return `${escapedPartialJson}`;
+ }
+ }
+ var __internal__OpenAIFnMessagesSymbol = Symbol(
+ "internal_openai_fn_messages"
+ );
+ function isChatCompletionChunk(data) {
+ return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
+ }
+ function isCompletion(data) {
+ return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
+ }
+ function OpenAIStream(res, callbacks) {
+ const cb = callbacks;
+ let stream;
+ if (Symbol.asyncIterator in res) {
+ stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
+ createCallbacksTransformer(
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ )
+ );
+ } else {
+ stream = AIStream(
+ res,
+ parseOpenAIStream(),
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ );
+ }
+ if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
+ const functionCallTransformer = createFunctionCallTransformer(cb);
+ return stream.pipeThrough(functionCallTransformer);
+ } else {
+ return stream.pipeThrough(
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+ );
+ }
+ }
+ function createFunctionCallTransformer(callbacks) {
+ const textEncoder = new TextEncoder();
+ let isFirstChunk = true;
+ let aggregatedResponse = "";
+ let aggregatedFinalCompletionResponse = "";
+ let isFunctionStreamingIn = false;
+ let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+ const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
+ const decode = createChunkDecoder();
+ return new TransformStream({
+ async transform(chunk, controller) {
+ const message = decode(chunk);
+ aggregatedFinalCompletionResponse += message;
+ const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
+ if (shouldHandleAsFunction) {
+ isFunctionStreamingIn = true;
+ aggregatedResponse += message;
+ isFirstChunk = false;
+ return;
+ }
+ if (!isFunctionStreamingIn) {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+ );
+ return;
+ } else {
+ aggregatedResponse += message;
+ }
+ },
+ async flush(controller) {
+ try {
+ if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
+ isFunctionStreamingIn = false;
+ const payload = JSON.parse(aggregatedResponse);
+ let newFunctionCallMessages = [
+ ...functionCallMessages
+ ];
+ let functionResponse = void 0;
+ if (callbacks.experimental_onFunctionCall) {
+ if (payload.function_call === void 0) {
+ console.warn(
+ "experimental_onFunctionCall should not be defined when using tools"
+ );
+ }
+ const argumentsPayload = JSON.parse(
+ payload.function_call.arguments
+ );
+ functionResponse = await callbacks.experimental_onFunctionCall(
+ {
+ name: payload.function_call.name,
+ arguments: argumentsPayload
+ },
+ (result) => {
+ newFunctionCallMessages = [
+ ...functionCallMessages,
+ {
+ role: "assistant",
+ content: "",
+ function_call: payload.function_call
+ },
+ {
+ role: "function",
+ name: payload.function_call.name,
+ content: JSON.stringify(result)
+ }
+ ];
+ return newFunctionCallMessages;
+ }
+ );
+ }
+ if (callbacks.experimental_onToolCall) {
+ const toolCalls = {
+ tools: []
+ };
+ for (const tool of payload.tool_calls) {
+ toolCalls.tools.push({
+ id: tool.id,
+ type: "function",
+ func: {
+ name: tool.function.name,
+ arguments: tool.function.arguments
+ }
+ });
+ }
+ let responseIndex = 0;
+ try {
+ functionResponse = await callbacks.experimental_onToolCall(
+ toolCalls,
+ (result) => {
+ if (result) {
+ const { tool_call_id, function_name, tool_call_result } = result;
+ newFunctionCallMessages = [
+ ...newFunctionCallMessages,
+ // Only append the assistant message if it's the first response
+ ...responseIndex === 0 ? [
+ {
+ role: "assistant",
+ content: "",
+ tool_calls: payload.tool_calls.map(
+ (tc) => ({
+ id: tc.id,
+ type: "function",
+ function: {
+ name: tc.function.name,
+ // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
+ arguments: JSON.stringify(
+ tc.function.arguments
+ )
+ }
+ })
+ )
+ }
+ ] : [],
+ // Append the function call result message
+ {
+ role: "tool",
+ tool_call_id,
+ name: function_name,
+ content: JSON.stringify(tool_call_result)
+ }
+ ];
+ responseIndex++;
+ }
+ return newFunctionCallMessages;
+ }
+ );
+ } catch (e) {
+ console.error("Error calling experimental_onToolCall:", e);
+ }
+ }
+ if (!functionResponse) {
+ controller.enqueue(
+ textEncoder.encode(
+ isComplexMode ? formatStreamPart(
+ payload.function_call ? "function_call" : "tool_calls",
+ // parse to prevent double-encoding:
+ JSON.parse(aggregatedResponse)
+ ) : aggregatedResponse
+ )
+ );
+ return;
+ } else if (typeof functionResponse === "string") {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
+ );
+ aggregatedFinalCompletionResponse = functionResponse;
+ return;
+ }
+ const filteredCallbacks = {
+ ...callbacks,
+ onStart: void 0
+ };
+ callbacks.onFinal = void 0;
+ const openAIStream = OpenAIStream(functionResponse, {
+ ...filteredCallbacks,
+ [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+ });
+ const reader = openAIStream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ controller.enqueue(value);
+ }
+ }
+ } finally {
+ if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
+ await callbacks.onFinal(aggregatedFinalCompletionResponse);
+ }
+ }
+ }
+ });
  }

  // streams/replicate-stream.ts
@@ -1063,79 +1120,6 @@ async function ReplicateStream(res, cb, options) {
  );
  }

- // streams/assistant-response.ts
- function experimental_AssistantResponse({ threadId, messageId }, process2) {
- const stream = new ReadableStream({
- async start(controller) {
- var _a;
- const textEncoder = new TextEncoder();
- const sendMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("assistant_message", message))
- );
- };
- const sendDataMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("data_message", message))
- );
- };
- const sendError = (errorMessage) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("error", errorMessage))
- );
- };
- controller.enqueue(
- textEncoder.encode(
- formatStreamPart("assistant_control_data", {
- threadId,
- messageId
- })
- )
- );
- try {
- await process2({
- threadId,
- messageId,
- sendMessage,
- sendDataMessage
- });
- } catch (error) {
- sendError((_a = error.message) != null ? _a : `${error}`);
- } finally {
- controller.close();
- }
- },
- pull(controller) {
- },
- cancel() {
- }
- });
- return new Response(stream, {
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8"
- }
- });
- }
-
- // streams/google-generative-ai-stream.ts
- async function* streamable4(response) {
- var _a, _b, _c;
- for await (const chunk of response.stream) {
- const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
- if (parts === void 0) {
- continue;
- }
- const firstPart = parts[0];
- if (typeof firstPart.text === "string") {
- yield firstPart.text;
- }
- }
- }
- function GoogleGenerativeAIStream(response, cb) {
- return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
- }
-
  // shared/read-data-stream.ts
  var NEWLINE = "\n".charCodeAt(0);
  function concatChunks(chunks, totalLength) {
@@ -1364,6 +1348,43 @@ var experimental_StreamingReactResponse = class {
  return next;
  }
  };
+
+ // streams/streaming-text-response.ts
+ var StreamingTextResponse = class extends Response {
+ constructor(res, init, data) {
+ let processedStream = res;
+ if (data) {
+ processedStream = res.pipeThrough(data.stream);
+ }
+ super(processedStream, {
+ ...init,
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ [COMPLEX_HEADER]: data ? "true" : "false",
+ ...init == null ? void 0 : init.headers
+ }
+ });
+ }
+ };
+ function streamToResponse(res, response, init) {
+ response.writeHead((init == null ? void 0 : init.status) || 200, {
+ "Content-Type": "text/plain; charset=utf-8",
+ ...init == null ? void 0 : init.headers
+ });
+ const reader = res.getReader();
+ function read() {
+ reader.read().then(({ done, value }) => {
+ if (done) {
+ response.end();
+ return;
+ }
+ response.write(value);
+ read();
+ });
+ }
+ read();
+ }
  export {
  AIStream,
  AWSBedrockAnthropicStream,
@@ -1377,6 +1398,7 @@ export {
  HuggingFaceStream,
  InkeepStream,
  LangChainStream,
+ MistralStream,
  OpenAIStream,
  ReplicateStream,
  StreamingTextResponse,