ai 2.2.35 → 2.2.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +163 -160
- package/dist/index.js +751 -728
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +752 -730
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.js
CHANGED
@@ -32,6 +32,7 @@ __export(streams_exports, {
|
|
32
32
|
HuggingFaceStream: () => HuggingFaceStream,
|
33
33
|
InkeepStream: () => InkeepStream,
|
34
34
|
LangChainStream: () => LangChainStream,
|
35
|
+
MistralStream: () => MistralStream,
|
35
36
|
OpenAIStream: () => OpenAIStream,
|
36
37
|
ReplicateStream: () => ReplicateStream,
|
37
38
|
StreamingTextResponse: () => StreamingTextResponse,
|
@@ -50,123 +51,8 @@ __export(streams_exports, {
|
|
50
51
|
});
|
51
52
|
module.exports = __toCommonJS(streams_exports);
|
52
53
|
|
53
|
-
//
|
54
|
-
var
|
55
|
-
function createEventStreamTransformer(customParser) {
|
56
|
-
const textDecoder = new TextDecoder();
|
57
|
-
let eventSourceParser;
|
58
|
-
return new TransformStream({
|
59
|
-
async start(controller) {
|
60
|
-
eventSourceParser = (0, import_eventsource_parser.createParser)(
|
61
|
-
(event) => {
|
62
|
-
if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
|
63
|
-
// @see https://replicate.com/docs/streaming
|
64
|
-
event.event === "done") {
|
65
|
-
controller.terminate();
|
66
|
-
return;
|
67
|
-
}
|
68
|
-
if ("data" in event) {
|
69
|
-
const parsedMessage = customParser ? customParser(event.data, {
|
70
|
-
event: event.event
|
71
|
-
}) : event.data;
|
72
|
-
if (parsedMessage)
|
73
|
-
controller.enqueue(parsedMessage);
|
74
|
-
}
|
75
|
-
}
|
76
|
-
);
|
77
|
-
},
|
78
|
-
transform(chunk) {
|
79
|
-
eventSourceParser.feed(textDecoder.decode(chunk));
|
80
|
-
}
|
81
|
-
});
|
82
|
-
}
|
83
|
-
function createCallbacksTransformer(cb) {
|
84
|
-
const textEncoder = new TextEncoder();
|
85
|
-
let aggregatedResponse = "";
|
86
|
-
const callbacks = cb || {};
|
87
|
-
return new TransformStream({
|
88
|
-
async start() {
|
89
|
-
if (callbacks.onStart)
|
90
|
-
await callbacks.onStart();
|
91
|
-
},
|
92
|
-
async transform(message, controller) {
|
93
|
-
controller.enqueue(textEncoder.encode(message));
|
94
|
-
aggregatedResponse += message;
|
95
|
-
if (callbacks.onToken)
|
96
|
-
await callbacks.onToken(message);
|
97
|
-
},
|
98
|
-
async flush() {
|
99
|
-
const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
|
100
|
-
if (callbacks.onCompletion) {
|
101
|
-
await callbacks.onCompletion(aggregatedResponse);
|
102
|
-
}
|
103
|
-
if (callbacks.onFinal && !isOpenAICallbacks) {
|
104
|
-
await callbacks.onFinal(aggregatedResponse);
|
105
|
-
}
|
106
|
-
}
|
107
|
-
});
|
108
|
-
}
|
109
|
-
function isOfTypeOpenAIStreamCallbacks(callbacks) {
|
110
|
-
return "experimental_onFunctionCall" in callbacks;
|
111
|
-
}
|
112
|
-
function trimStartOfStreamHelper() {
|
113
|
-
let isStreamStart = true;
|
114
|
-
return (text) => {
|
115
|
-
if (isStreamStart) {
|
116
|
-
text = text.trimStart();
|
117
|
-
if (text)
|
118
|
-
isStreamStart = false;
|
119
|
-
}
|
120
|
-
return text;
|
121
|
-
};
|
122
|
-
}
|
123
|
-
function AIStream(response, customParser, callbacks) {
|
124
|
-
if (!response.ok) {
|
125
|
-
if (response.body) {
|
126
|
-
const reader = response.body.getReader();
|
127
|
-
return new ReadableStream({
|
128
|
-
async start(controller) {
|
129
|
-
const { done, value } = await reader.read();
|
130
|
-
if (!done) {
|
131
|
-
const errorText = new TextDecoder().decode(value);
|
132
|
-
controller.error(new Error(`Response error: ${errorText}`));
|
133
|
-
}
|
134
|
-
}
|
135
|
-
});
|
136
|
-
} else {
|
137
|
-
return new ReadableStream({
|
138
|
-
start(controller) {
|
139
|
-
controller.error(new Error("Response error: No response body"));
|
140
|
-
}
|
141
|
-
});
|
142
|
-
}
|
143
|
-
}
|
144
|
-
const responseBodyStream = response.body || createEmptyReadableStream();
|
145
|
-
return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
|
146
|
-
}
|
147
|
-
function createEmptyReadableStream() {
|
148
|
-
return new ReadableStream({
|
149
|
-
start(controller) {
|
150
|
-
controller.close();
|
151
|
-
}
|
152
|
-
});
|
153
|
-
}
|
154
|
-
function readableFromAsyncIterable(iterable) {
|
155
|
-
let it = iterable[Symbol.asyncIterator]();
|
156
|
-
return new ReadableStream({
|
157
|
-
async pull(controller) {
|
158
|
-
const { done, value } = await it.next();
|
159
|
-
if (done)
|
160
|
-
controller.close();
|
161
|
-
else
|
162
|
-
controller.enqueue(value);
|
163
|
-
},
|
164
|
-
async cancel(reason) {
|
165
|
-
var _a;
|
166
|
-
await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
|
167
|
-
}
|
168
|
-
});
|
169
|
-
}
|
54
|
+
// shared/utils.ts
|
55
|
+
var import_non_secure = require("nanoid/non-secure");
|
170
56
|
|
171
57
|
// shared/stream-parts.ts
|
172
58
|
var textStreamPart = {
|
@@ -348,538 +234,390 @@ function formatStreamPart(type, value) {
|
|
348
234
|
`;
|
349
235
|
}
|
350
236
|
|
351
|
-
//
|
352
|
-
var
|
353
|
-
|
354
|
-
|
355
|
-
|
356
|
-
|
357
|
-
// in case we're doing async work
|
358
|
-
this.isClosedPromise = null;
|
359
|
-
this.isClosedPromiseResolver = void 0;
|
360
|
-
this.isClosed = false;
|
361
|
-
// array to store appended data
|
362
|
-
this.data = [];
|
363
|
-
this.messageAnnotations = [];
|
364
|
-
this.isClosedPromise = new Promise((resolve) => {
|
365
|
-
this.isClosedPromiseResolver = resolve;
|
366
|
-
});
|
367
|
-
const self = this;
|
368
|
-
this.stream = new TransformStream({
|
369
|
-
start: async (controller) => {
|
370
|
-
self.controller = controller;
|
371
|
-
},
|
372
|
-
transform: async (chunk, controller) => {
|
373
|
-
if (self.data.length > 0) {
|
374
|
-
const encodedData = self.encoder.encode(
|
375
|
-
formatStreamPart("data", self.data)
|
376
|
-
);
|
377
|
-
self.data = [];
|
378
|
-
controller.enqueue(encodedData);
|
379
|
-
}
|
380
|
-
if (self.messageAnnotations.length) {
|
381
|
-
const encodedMessageAnnotations = self.encoder.encode(
|
382
|
-
formatStreamPart("message_annotations", self.messageAnnotations)
|
383
|
-
);
|
384
|
-
self.messageAnnotations = [];
|
385
|
-
controller.enqueue(encodedMessageAnnotations);
|
386
|
-
}
|
387
|
-
controller.enqueue(chunk);
|
388
|
-
},
|
389
|
-
async flush(controller) {
|
390
|
-
const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
|
391
|
-
console.warn(
|
392
|
-
"The data stream is hanging. Did you forget to close it with `data.close()`?"
|
393
|
-
);
|
394
|
-
}, 3e3) : null;
|
395
|
-
await self.isClosedPromise;
|
396
|
-
if (warningTimeout !== null) {
|
397
|
-
clearTimeout(warningTimeout);
|
398
|
-
}
|
399
|
-
if (self.data.length) {
|
400
|
-
const encodedData = self.encoder.encode(
|
401
|
-
formatStreamPart("data", self.data)
|
402
|
-
);
|
403
|
-
controller.enqueue(encodedData);
|
404
|
-
}
|
405
|
-
if (self.messageAnnotations.length) {
|
406
|
-
const encodedData = self.encoder.encode(
|
407
|
-
formatStreamPart("message_annotations", self.messageAnnotations)
|
408
|
-
);
|
409
|
-
controller.enqueue(encodedData);
|
410
|
-
}
|
411
|
-
}
|
412
|
-
});
|
413
|
-
}
|
414
|
-
async close() {
|
415
|
-
var _a;
|
416
|
-
if (this.isClosed) {
|
417
|
-
throw new Error("Data Stream has already been closed.");
|
418
|
-
}
|
419
|
-
if (!this.controller) {
|
420
|
-
throw new Error("Stream controller is not initialized.");
|
421
|
-
}
|
422
|
-
(_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
|
423
|
-
this.isClosed = true;
|
424
|
-
}
|
425
|
-
append(value) {
|
426
|
-
if (this.isClosed) {
|
427
|
-
throw new Error("Data Stream has already been closed.");
|
428
|
-
}
|
429
|
-
this.data.push(value);
|
430
|
-
}
|
431
|
-
appendMessageAnnotation(value) {
|
432
|
-
if (this.isClosed) {
|
433
|
-
throw new Error("Data Stream has already been closed.");
|
434
|
-
}
|
435
|
-
this.messageAnnotations.push(value);
|
436
|
-
}
|
437
|
-
};
|
438
|
-
function createStreamDataTransformer(experimental_streamData) {
|
439
|
-
if (!experimental_streamData) {
|
440
|
-
return new TransformStream({
|
441
|
-
transform: async (chunk, controller) => {
|
442
|
-
controller.enqueue(chunk);
|
443
|
-
}
|
444
|
-
});
|
445
|
-
}
|
446
|
-
const encoder = new TextEncoder();
|
447
|
-
const decoder = new TextDecoder();
|
448
|
-
return new TransformStream({
|
449
|
-
transform: async (chunk, controller) => {
|
450
|
-
const message = decoder.decode(chunk);
|
451
|
-
controller.enqueue(encoder.encode(formatStreamPart("text", message)));
|
452
|
-
}
|
453
|
-
});
|
454
|
-
}
|
455
|
-
|
456
|
-
// streams/aws-bedrock-stream.ts
|
457
|
-
async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
|
458
|
-
var _a, _b;
|
237
|
+
// shared/utils.ts
|
238
|
+
var nanoid = (0, import_non_secure.customAlphabet)(
|
239
|
+
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
240
|
+
7
|
241
|
+
);
|
242
|
+
function createChunkDecoder(complex) {
|
459
243
|
const decoder = new TextDecoder();
|
460
|
-
|
461
|
-
|
462
|
-
|
463
|
-
|
464
|
-
|
465
|
-
|
466
|
-
if (delta != null) {
|
467
|
-
yield delta;
|
468
|
-
}
|
469
|
-
}
|
244
|
+
if (!complex) {
|
245
|
+
return function(chunk) {
|
246
|
+
if (!chunk)
|
247
|
+
return "";
|
248
|
+
return decoder.decode(chunk, { stream: true });
|
249
|
+
};
|
470
250
|
}
|
471
|
-
|
472
|
-
|
473
|
-
|
474
|
-
}
|
475
|
-
function AWSBedrockCohereStream(response, callbacks) {
|
476
|
-
return AWSBedrockStream(
|
477
|
-
response,
|
478
|
-
callbacks,
|
479
|
-
// As of 2023-11-17, Bedrock does not support streaming for Cohere,
|
480
|
-
// so we take the full generation:
|
481
|
-
(chunk) => {
|
482
|
-
var _a, _b;
|
483
|
-
return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
|
484
|
-
}
|
485
|
-
);
|
486
|
-
}
|
487
|
-
function AWSBedrockLlama2Stream(response, callbacks) {
|
488
|
-
return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
|
489
|
-
}
|
490
|
-
function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
|
491
|
-
return readableFromAsyncIterable(
|
492
|
-
asDeltaIterable(response, extractTextDeltaFromChunk)
|
493
|
-
).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
494
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
495
|
-
);
|
496
|
-
}
|
497
|
-
|
498
|
-
// shared/utils.ts
|
499
|
-
var import_non_secure = require("nanoid/non-secure");
|
500
|
-
var nanoid = (0, import_non_secure.customAlphabet)(
|
501
|
-
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
502
|
-
7
|
503
|
-
);
|
504
|
-
function createChunkDecoder(complex) {
|
505
|
-
const decoder = new TextDecoder();
|
506
|
-
if (!complex) {
|
507
|
-
return function(chunk) {
|
508
|
-
if (!chunk)
|
509
|
-
return "";
|
510
|
-
return decoder.decode(chunk, { stream: true });
|
511
|
-
};
|
512
|
-
}
|
513
|
-
return function(chunk) {
|
514
|
-
const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
|
515
|
-
return decoded.map(parseStreamPart).filter(Boolean);
|
516
|
-
};
|
251
|
+
return function(chunk) {
|
252
|
+
const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
|
253
|
+
return decoded.map(parseStreamPart).filter(Boolean);
|
254
|
+
};
|
517
255
|
}
|
518
256
|
var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
|
519
257
|
var COMPLEX_HEADER = "X-Experimental-Stream-Data";
|
520
258
|
|
521
|
-
// streams/
|
522
|
-
|
523
|
-
|
524
|
-
|
525
|
-
|
526
|
-
|
527
|
-
|
528
|
-
|
529
|
-
|
530
|
-
|
531
|
-
|
532
|
-
|
533
|
-
|
534
|
-
|
535
|
-
|
536
|
-
|
537
|
-
|
538
|
-
|
539
|
-
|
540
|
-
|
541
|
-
|
542
|
-
|
543
|
-
role: (_c = choice.delta) == null ? void 0 : _c.role,
|
544
|
-
tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
|
545
|
-
index,
|
546
|
-
id: toolCall.id,
|
547
|
-
function: toolCall.function,
|
548
|
-
type: toolCall.type
|
549
|
-
})) : void 0
|
550
|
-
},
|
551
|
-
finish_reason: choice.finishReason,
|
552
|
-
index: choice.index
|
553
|
-
};
|
554
|
-
})
|
555
|
-
};
|
556
|
-
}
|
557
|
-
const text = extract(chunk);
|
558
|
-
if (text)
|
559
|
-
yield text;
|
560
|
-
}
|
561
|
-
}
|
562
|
-
function chunkToText() {
|
563
|
-
const trimStartOfStream = trimStartOfStreamHelper();
|
564
|
-
let isFunctionStreamingIn;
|
565
|
-
return (json) => {
|
566
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
|
567
|
-
if (isChatCompletionChunk(json)) {
|
568
|
-
const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
|
569
|
-
if ((_b = delta.function_call) == null ? void 0 : _b.name) {
|
570
|
-
isFunctionStreamingIn = true;
|
571
|
-
return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
|
572
|
-
} else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
|
573
|
-
isFunctionStreamingIn = true;
|
574
|
-
const toolCall = delta.tool_calls[0];
|
575
|
-
if (toolCall.index === 0) {
|
576
|
-
return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
|
577
|
-
} else {
|
578
|
-
return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
|
259
|
+
// streams/ai-stream.ts
|
260
|
+
var import_eventsource_parser = require("eventsource-parser");
|
261
|
+
function createEventStreamTransformer(customParser) {
|
262
|
+
const textDecoder = new TextDecoder();
|
263
|
+
let eventSourceParser;
|
264
|
+
return new TransformStream({
|
265
|
+
async start(controller) {
|
266
|
+
eventSourceParser = (0, import_eventsource_parser.createParser)(
|
267
|
+
(event) => {
|
268
|
+
if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
|
269
|
+
// @see https://replicate.com/docs/streaming
|
270
|
+
event.event === "done") {
|
271
|
+
controller.terminate();
|
272
|
+
return;
|
273
|
+
}
|
274
|
+
if ("data" in event) {
|
275
|
+
const parsedMessage = customParser ? customParser(event.data, {
|
276
|
+
event: event.event
|
277
|
+
}) : event.data;
|
278
|
+
if (parsedMessage)
|
279
|
+
controller.enqueue(parsedMessage);
|
280
|
+
}
|
579
281
|
}
|
580
|
-
|
581
|
-
|
582
|
-
|
583
|
-
|
584
|
-
} else if (isFunctionStreamingIn && (((_o = json.choices[0]) == null ? void 0 : _o.finish_reason) === "function_call" || ((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "stop")) {
|
585
|
-
isFunctionStreamingIn = false;
|
586
|
-
return '"}}';
|
587
|
-
} else if (isFunctionStreamingIn && ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "tool_calls") {
|
588
|
-
isFunctionStreamingIn = false;
|
589
|
-
return '"}}]}';
|
590
|
-
}
|
282
|
+
);
|
283
|
+
},
|
284
|
+
transform(chunk) {
|
285
|
+
eventSourceParser.feed(textDecoder.decode(chunk));
|
591
286
|
}
|
592
|
-
|
593
|
-
isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
|
594
|
-
);
|
595
|
-
return text;
|
596
|
-
};
|
597
|
-
function cleanupArguments(argumentChunk) {
|
598
|
-
let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
|
599
|
-
return `${escapedPartialJson}`;
|
600
|
-
}
|
601
|
-
}
|
602
|
-
var __internal__OpenAIFnMessagesSymbol = Symbol(
|
603
|
-
"internal_openai_fn_messages"
|
604
|
-
);
|
605
|
-
function isChatCompletionChunk(data) {
|
606
|
-
return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
|
607
|
-
}
|
608
|
-
function isCompletion(data) {
|
609
|
-
return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
|
610
|
-
}
|
611
|
-
function OpenAIStream(res, callbacks) {
|
612
|
-
const cb = callbacks;
|
613
|
-
let stream;
|
614
|
-
if (Symbol.asyncIterator in res) {
|
615
|
-
stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
|
616
|
-
createCallbacksTransformer(
|
617
|
-
(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
|
618
|
-
...cb,
|
619
|
-
onFinal: void 0
|
620
|
-
} : {
|
621
|
-
...cb
|
622
|
-
}
|
623
|
-
)
|
624
|
-
);
|
625
|
-
} else {
|
626
|
-
stream = AIStream(
|
627
|
-
res,
|
628
|
-
parseOpenAIStream(),
|
629
|
-
(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
|
630
|
-
...cb,
|
631
|
-
onFinal: void 0
|
632
|
-
} : {
|
633
|
-
...cb
|
634
|
-
}
|
635
|
-
);
|
636
|
-
}
|
637
|
-
if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
|
638
|
-
const functionCallTransformer = createFunctionCallTransformer(cb);
|
639
|
-
return stream.pipeThrough(functionCallTransformer);
|
640
|
-
} else {
|
641
|
-
return stream.pipeThrough(
|
642
|
-
createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
|
643
|
-
);
|
644
|
-
}
|
287
|
+
});
|
645
288
|
}
|
646
|
-
function
|
289
|
+
function createCallbacksTransformer(cb) {
|
647
290
|
const textEncoder = new TextEncoder();
|
648
|
-
let isFirstChunk = true;
|
649
291
|
let aggregatedResponse = "";
|
650
|
-
|
651
|
-
let isFunctionStreamingIn = false;
|
652
|
-
let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
|
653
|
-
const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
|
654
|
-
const decode = createChunkDecoder();
|
292
|
+
const callbacks = cb || {};
|
655
293
|
return new TransformStream({
|
656
|
-
async
|
657
|
-
|
658
|
-
|
659
|
-
|
660
|
-
|
661
|
-
|
662
|
-
|
663
|
-
|
664
|
-
|
294
|
+
async start() {
|
295
|
+
if (callbacks.onStart)
|
296
|
+
await callbacks.onStart();
|
297
|
+
},
|
298
|
+
async transform(message, controller) {
|
299
|
+
controller.enqueue(textEncoder.encode(message));
|
300
|
+
aggregatedResponse += message;
|
301
|
+
if (callbacks.onToken)
|
302
|
+
await callbacks.onToken(message);
|
303
|
+
},
|
304
|
+
async flush() {
|
305
|
+
const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
|
306
|
+
if (callbacks.onCompletion) {
|
307
|
+
await callbacks.onCompletion(aggregatedResponse);
|
665
308
|
}
|
666
|
-
if (!
|
667
|
-
|
668
|
-
isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
|
669
|
-
);
|
670
|
-
return;
|
671
|
-
} else {
|
672
|
-
aggregatedResponse += message;
|
309
|
+
if (callbacks.onFinal && !isOpenAICallbacks) {
|
310
|
+
await callbacks.onFinal(aggregatedResponse);
|
673
311
|
}
|
674
|
-
}
|
675
|
-
|
676
|
-
|
677
|
-
|
678
|
-
|
679
|
-
|
680
|
-
|
681
|
-
|
682
|
-
|
683
|
-
|
684
|
-
|
685
|
-
|
686
|
-
|
687
|
-
|
688
|
-
|
689
|
-
|
690
|
-
|
691
|
-
|
692
|
-
|
693
|
-
|
694
|
-
|
695
|
-
|
696
|
-
|
697
|
-
|
698
|
-
|
699
|
-
|
700
|
-
|
701
|
-
{
|
702
|
-
role: "assistant",
|
703
|
-
content: "",
|
704
|
-
function_call: payload.function_call
|
705
|
-
},
|
706
|
-
{
|
707
|
-
role: "function",
|
708
|
-
name: payload.function_call.name,
|
709
|
-
content: JSON.stringify(result)
|
710
|
-
}
|
711
|
-
];
|
712
|
-
return newFunctionCallMessages;
|
713
|
-
}
|
714
|
-
);
|
715
|
-
}
|
716
|
-
if (callbacks.experimental_onToolCall) {
|
717
|
-
const toolCalls = {
|
718
|
-
tools: []
|
719
|
-
};
|
720
|
-
for (const tool of payload.tool_calls) {
|
721
|
-
toolCalls.tools.push({
|
722
|
-
id: tool.id,
|
723
|
-
type: "function",
|
724
|
-
func: {
|
725
|
-
name: tool.function.name,
|
726
|
-
arguments: tool.function.arguments
|
727
|
-
}
|
728
|
-
});
|
729
|
-
}
|
730
|
-
let responseIndex = 0;
|
731
|
-
try {
|
732
|
-
functionResponse = await callbacks.experimental_onToolCall(
|
733
|
-
toolCalls,
|
734
|
-
(result) => {
|
735
|
-
if (result) {
|
736
|
-
const { tool_call_id, function_name, tool_call_result } = result;
|
737
|
-
newFunctionCallMessages = [
|
738
|
-
...newFunctionCallMessages,
|
739
|
-
// Only append the assistant message if it's the first response
|
740
|
-
...responseIndex === 0 ? [
|
741
|
-
{
|
742
|
-
role: "assistant",
|
743
|
-
content: "",
|
744
|
-
tool_calls: payload.tool_calls.map(
|
745
|
-
(tc) => ({
|
746
|
-
id: tc.id,
|
747
|
-
type: "function",
|
748
|
-
function: {
|
749
|
-
name: tc.function.name,
|
750
|
-
// we send the arguments an object to the user, but as the API expects a string, we need to stringify it
|
751
|
-
arguments: JSON.stringify(
|
752
|
-
tc.function.arguments
|
753
|
-
)
|
754
|
-
}
|
755
|
-
})
|
756
|
-
)
|
757
|
-
}
|
758
|
-
] : [],
|
759
|
-
// Append the function call result message
|
760
|
-
{
|
761
|
-
role: "tool",
|
762
|
-
tool_call_id,
|
763
|
-
name: function_name,
|
764
|
-
content: JSON.stringify(tool_call_result)
|
765
|
-
}
|
766
|
-
];
|
767
|
-
responseIndex++;
|
768
|
-
}
|
769
|
-
return newFunctionCallMessages;
|
770
|
-
}
|
771
|
-
);
|
772
|
-
} catch (e) {
|
773
|
-
console.error("Error calling experimental_onToolCall:", e);
|
774
|
-
}
|
775
|
-
}
|
776
|
-
if (!functionResponse) {
|
777
|
-
controller.enqueue(
|
778
|
-
textEncoder.encode(
|
779
|
-
isComplexMode ? formatStreamPart(
|
780
|
-
payload.function_call ? "function_call" : "tool_calls",
|
781
|
-
// parse to prevent double-encoding:
|
782
|
-
JSON.parse(aggregatedResponse)
|
783
|
-
) : aggregatedResponse
|
784
|
-
)
|
785
|
-
);
|
786
|
-
return;
|
787
|
-
} else if (typeof functionResponse === "string") {
|
788
|
-
controller.enqueue(
|
789
|
-
isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
|
790
|
-
);
|
791
|
-
return;
|
792
|
-
}
|
793
|
-
const filteredCallbacks = {
|
794
|
-
...callbacks,
|
795
|
-
onStart: void 0
|
796
|
-
};
|
797
|
-
callbacks.onFinal = void 0;
|
798
|
-
const openAIStream = OpenAIStream(functionResponse, {
|
799
|
-
...filteredCallbacks,
|
800
|
-
[__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
|
801
|
-
});
|
802
|
-
const reader = openAIStream.getReader();
|
803
|
-
while (true) {
|
804
|
-
const { done, value } = await reader.read();
|
805
|
-
if (done) {
|
806
|
-
break;
|
807
|
-
}
|
808
|
-
controller.enqueue(value);
|
312
|
+
}
|
313
|
+
});
|
314
|
+
}
|
315
|
+
function isOfTypeOpenAIStreamCallbacks(callbacks) {
|
316
|
+
return "experimental_onFunctionCall" in callbacks;
|
317
|
+
}
|
318
|
+
function trimStartOfStreamHelper() {
|
319
|
+
let isStreamStart = true;
|
320
|
+
return (text) => {
|
321
|
+
if (isStreamStart) {
|
322
|
+
text = text.trimStart();
|
323
|
+
if (text)
|
324
|
+
isStreamStart = false;
|
325
|
+
}
|
326
|
+
return text;
|
327
|
+
};
|
328
|
+
}
|
329
|
+
function AIStream(response, customParser, callbacks) {
|
330
|
+
if (!response.ok) {
|
331
|
+
if (response.body) {
|
332
|
+
const reader = response.body.getReader();
|
333
|
+
return new ReadableStream({
|
334
|
+
async start(controller) {
|
335
|
+
const { done, value } = await reader.read();
|
336
|
+
if (!done) {
|
337
|
+
const errorText = new TextDecoder().decode(value);
|
338
|
+
controller.error(new Error(`Response error: ${errorText}`));
|
809
339
|
}
|
810
340
|
}
|
811
|
-
}
|
812
|
-
|
813
|
-
|
341
|
+
});
|
342
|
+
} else {
|
343
|
+
return new ReadableStream({
|
344
|
+
start(controller) {
|
345
|
+
controller.error(new Error("Response error: No response body"));
|
814
346
|
}
|
815
|
-
}
|
347
|
+
});
|
348
|
+
}
|
349
|
+
}
|
350
|
+
const responseBodyStream = response.body || createEmptyReadableStream();
|
351
|
+
return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
|
352
|
+
}
|
353
|
+
function createEmptyReadableStream() {
|
354
|
+
return new ReadableStream({
|
355
|
+
start(controller) {
|
356
|
+
controller.close();
|
357
|
+
}
|
358
|
+
});
|
359
|
+
}
|
360
|
+
function readableFromAsyncIterable(iterable) {
|
361
|
+
let it = iterable[Symbol.asyncIterator]();
|
362
|
+
return new ReadableStream({
|
363
|
+
async pull(controller) {
|
364
|
+
const { done, value } = await it.next();
|
365
|
+
if (done)
|
366
|
+
controller.close();
|
367
|
+
else
|
368
|
+
controller.enqueue(value);
|
369
|
+
},
|
370
|
+
async cancel(reason) {
|
371
|
+
var _a;
|
372
|
+
await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
|
816
373
|
}
|
817
374
|
});
|
818
375
|
}
|
819
376
|
|
820
|
-
// streams/
|
821
|
-
var
|
822
|
-
constructor(
|
823
|
-
|
824
|
-
|
825
|
-
|
826
|
-
|
827
|
-
|
828
|
-
|
829
|
-
|
830
|
-
|
831
|
-
|
832
|
-
|
833
|
-
|
377
|
+
// streams/stream-data.ts
|
378
|
+
var experimental_StreamData = class {
|
379
|
+
constructor() {
|
380
|
+
this.encoder = new TextEncoder();
|
381
|
+
this.controller = null;
|
382
|
+
// closing the stream is synchronous, but we want to return a promise
|
383
|
+
// in case we're doing async work
|
384
|
+
this.isClosedPromise = null;
|
385
|
+
this.isClosedPromiseResolver = void 0;
|
386
|
+
this.isClosed = false;
|
387
|
+
// array to store appended data
|
388
|
+
this.data = [];
|
389
|
+
this.messageAnnotations = [];
|
390
|
+
this.isClosedPromise = new Promise((resolve) => {
|
391
|
+
this.isClosedPromiseResolver = resolve;
|
392
|
+
});
|
393
|
+
const self = this;
|
394
|
+
this.stream = new TransformStream({
|
395
|
+
start: async (controller) => {
|
396
|
+
self.controller = controller;
|
397
|
+
},
|
398
|
+
transform: async (chunk, controller) => {
|
399
|
+
if (self.data.length > 0) {
|
400
|
+
const encodedData = self.encoder.encode(
|
401
|
+
formatStreamPart("data", self.data)
|
402
|
+
);
|
403
|
+
self.data = [];
|
404
|
+
controller.enqueue(encodedData);
|
405
|
+
}
|
406
|
+
if (self.messageAnnotations.length) {
|
407
|
+
const encodedMessageAnnotations = self.encoder.encode(
|
408
|
+
formatStreamPart("message_annotations", self.messageAnnotations)
|
409
|
+
);
|
410
|
+
self.messageAnnotations = [];
|
411
|
+
controller.enqueue(encodedMessageAnnotations);
|
412
|
+
}
|
413
|
+
controller.enqueue(chunk);
|
414
|
+
},
|
415
|
+
async flush(controller) {
|
416
|
+
const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
|
417
|
+
console.warn(
|
418
|
+
"The data stream is hanging. Did you forget to close it with `data.close()`?"
|
419
|
+
);
|
420
|
+
}, 3e3) : null;
|
421
|
+
await self.isClosedPromise;
|
422
|
+
if (warningTimeout !== null) {
|
423
|
+
clearTimeout(warningTimeout);
|
424
|
+
}
|
425
|
+
if (self.data.length) {
|
426
|
+
const encodedData = self.encoder.encode(
|
427
|
+
formatStreamPart("data", self.data)
|
428
|
+
);
|
429
|
+
controller.enqueue(encodedData);
|
430
|
+
}
|
431
|
+
if (self.messageAnnotations.length) {
|
432
|
+
const encodedData = self.encoder.encode(
|
433
|
+
formatStreamPart("message_annotations", self.messageAnnotations)
|
434
|
+
);
|
435
|
+
controller.enqueue(encodedData);
|
436
|
+
}
|
834
437
|
}
|
835
438
|
});
|
836
439
|
}
|
440
|
+
async close() {
|
441
|
+
var _a;
|
442
|
+
if (this.isClosed) {
|
443
|
+
throw new Error("Data Stream has already been closed.");
|
444
|
+
}
|
445
|
+
if (!this.controller) {
|
446
|
+
throw new Error("Stream controller is not initialized.");
|
447
|
+
}
|
448
|
+
(_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
|
449
|
+
this.isClosed = true;
|
450
|
+
}
|
451
|
+
append(value) {
|
452
|
+
if (this.isClosed) {
|
453
|
+
throw new Error("Data Stream has already been closed.");
|
454
|
+
}
|
455
|
+
this.data.push(value);
|
456
|
+
}
|
457
|
+
appendMessageAnnotation(value) {
|
458
|
+
if (this.isClosed) {
|
459
|
+
throw new Error("Data Stream has already been closed.");
|
460
|
+
}
|
461
|
+
this.messageAnnotations.push(value);
|
462
|
+
}
|
837
463
|
};
|
838
|
-
function
|
839
|
-
|
840
|
-
|
841
|
-
|
842
|
-
|
843
|
-
const reader = res.getReader();
|
844
|
-
function read() {
|
845
|
-
reader.read().then(({ done, value }) => {
|
846
|
-
if (done) {
|
847
|
-
response.end();
|
848
|
-
return;
|
464
|
+
function createStreamDataTransformer(experimental_streamData) {
|
465
|
+
if (!experimental_streamData) {
|
466
|
+
return new TransformStream({
|
467
|
+
transform: async (chunk, controller) => {
|
468
|
+
controller.enqueue(chunk);
|
849
469
|
}
|
850
|
-
response.write(value);
|
851
|
-
read();
|
852
470
|
});
|
853
471
|
}
|
854
|
-
|
472
|
+
const encoder = new TextEncoder();
|
473
|
+
const decoder = new TextDecoder();
|
474
|
+
return new TransformStream({
|
475
|
+
transform: async (chunk, controller) => {
|
476
|
+
const message = decoder.decode(chunk);
|
477
|
+
controller.enqueue(encoder.encode(formatStreamPart("text", message)));
|
478
|
+
}
|
479
|
+
});
|
855
480
|
}
|
856
481
|
|
857
|
-
// streams/
|
858
|
-
function
|
859
|
-
|
860
|
-
return
|
861
|
-
|
862
|
-
|
863
|
-
|
864
|
-
|
865
|
-
|
866
|
-
|
482
|
+
// streams/anthropic-stream.ts
|
483
|
+
function parseAnthropicStream() {
|
484
|
+
let previous = "";
|
485
|
+
return (data) => {
|
486
|
+
const json = JSON.parse(data);
|
487
|
+
if ("error" in json) {
|
488
|
+
throw new Error(`${json.error.type}: ${json.error.message}`);
|
489
|
+
}
|
490
|
+
if (!("completion" in json)) {
|
491
|
+
return;
|
492
|
+
}
|
493
|
+
const text = json.completion;
|
494
|
+
if (!previous || text.length > previous.length && text.startsWith(previous)) {
|
495
|
+
const delta = text.slice(previous.length);
|
496
|
+
previous = text;
|
497
|
+
return delta;
|
498
|
+
}
|
499
|
+
return text;
|
500
|
+
};
|
501
|
+
}
|
502
|
+
async function* streamable(stream) {
|
503
|
+
for await (const chunk of stream) {
|
504
|
+
if ("completion" in chunk) {
|
505
|
+
const text = chunk.completion;
|
506
|
+
if (text)
|
507
|
+
yield text;
|
508
|
+
} else if ("delta" in chunk) {
|
509
|
+
const { delta } = chunk;
|
510
|
+
if ("text" in delta) {
|
511
|
+
const text = delta.text;
|
512
|
+
if (text)
|
513
|
+
yield text;
|
867
514
|
}
|
868
|
-
|
869
|
-
|
870
|
-
|
871
|
-
|
872
|
-
|
515
|
+
}
|
516
|
+
}
|
517
|
+
}
|
518
|
+
function AnthropicStream(res, cb) {
|
519
|
+
if (Symbol.asyncIterator in res) {
|
520
|
+
return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
|
521
|
+
} else {
|
522
|
+
return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
|
523
|
+
createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
|
524
|
+
);
|
525
|
+
}
|
526
|
+
}
|
527
|
+
|
528
|
+
// streams/assistant-response.ts
// Builds a streaming HTTP Response for the assistants protocol: writes an
// initial control frame carrying the thread/message ids, then hands send
// helpers to the caller-supplied `process2`; any thrown error is forwarded
// as an "error" stream part and the stream is always closed.
function experimental_AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
    async start(controller) {
      var _a;
      const textEncoder = new TextEncoder();
      // Emit an assistant message frame.
      const sendMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("assistant_message", message))
        );
      };
      // Emit an arbitrary data-message frame.
      const sendDataMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("data_message", message))
        );
      };
      // Emit an error frame (used below when `process2` throws).
      const sendError = (errorMessage) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("error", errorMessage))
        );
      };
      // First frame: control data so the client can associate the stream
      // with its thread and message.
      controller.enqueue(
        textEncoder.encode(
          formatStreamPart("assistant_control_data", {
            threadId,
            messageId
          })
        )
      );
      try {
        await process2({
          threadId,
          messageId,
          sendMessage,
          sendDataMessage
        });
      } catch (error) {
        // Fall back to String(error) when there is no `.message`.
        sendError((_a = error.message) != null ? _a : `${error}`);
      } finally {
        // Always close, whether process2 succeeded or failed.
        controller.close();
      }
    },
    pull(controller) {
    },
    cancel() {
    }
  });
  return new Response(stream, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8"
    }
  });
}
|
582
|
+
|
583
|
+
// streams/aws-bedrock-stream.ts
/**
 * Iterates a Bedrock response body and yields the text delta extracted from
 * each event's JSON payload. Events without bytes, and payloads for which
 * the extractor returns null/undefined, are skipped.
 */
async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
  const decoder = new TextDecoder();
  const events = response.body ?? [];
  for await (const event of events) {
    const bytes = event.chunk?.bytes;
    if (bytes == null) {
      continue;
    }
    // Each event payload is a UTF-8 encoded JSON document.
    const payload = JSON.parse(decoder.decode(bytes));
    const delta = extractTextDeltaFromChunk(payload);
    if (delta != null) {
      yield delta;
    }
  }
}
|
881
|
-
function
|
882
|
-
return
|
599
|
+
/** Bedrock Anthropic adapter: the text delta lives on `chunk.completion`. */
function AWSBedrockAnthropicStream(response, callbacks) {
  const extractCompletion = (chunk) => chunk.completion;
  return AWSBedrockStream(response, callbacks, extractCompletion);
}
|
602
|
+
/** Bedrock Cohere adapter: takes the text of the first generation. */
function AWSBedrockCohereStream(response, callbacks) {
  // As of 2023-11-17, Bedrock does not support streaming for Cohere,
  // so we take the full generation:
  const extractGeneration = (chunk) => chunk.generations?.[0]?.text;
  return AWSBedrockStream(response, callbacks, extractGeneration);
}
|
614
|
+
/** Bedrock Llama 2 adapter: the text delta lives on `chunk.generation`. */
function AWSBedrockLlama2Stream(response, callbacks) {
  const extractGeneration = (chunk) => chunk.generation;
  return AWSBedrockStream(response, callbacks, extractGeneration);
}
|
617
|
+
/**
 * Core Bedrock adapter: converts the event stream into a ReadableStream of
 * text deltas, wires user callbacks, and applies the stream-data transform.
 */
function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
  const deltas = asDeltaIterable(response, extractTextDeltaFromChunk);
  return readableFromAsyncIterable(deltas)
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(
      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
    );
}
|
@@ -912,7 +650,7 @@ async function readAndProcessLines(reader, controller) {
|
|
912
650
|
}
|
913
651
|
controller.close();
|
914
652
|
}
|
915
|
-
function
|
653
|
+
function createParser2(res) {
|
916
654
|
var _a;
|
917
655
|
const reader = (_a = res.body) == null ? void 0 : _a.getReader();
|
918
656
|
return new ReadableStream({
|
@@ -940,56 +678,58 @@ function CohereStream(reader, callbacks) {
|
|
940
678
|
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
941
679
|
);
|
942
680
|
} else {
|
943
|
-
return
|
681
|
+
return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
944
682
|
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
945
683
|
);
|
946
684
|
}
|
947
685
|
}
|
948
686
|
|
949
|
-
// streams/
|
950
|
-
function
|
951
|
-
|
952
|
-
|
953
|
-
const
|
954
|
-
if (
|
955
|
-
|
956
|
-
}
|
957
|
-
if (!("completion" in json)) {
|
958
|
-
return;
|
687
|
+
// streams/google-generative-ai-stream.ts
/**
 * Yields the text of the first content part of the first candidate from each
 * Google Generative AI chunk. Chunks with no parts are skipped; parts whose
 * `text` is not a string yield nothing.
 */
async function* streamable3(response) {
  for await (const chunk of response.stream) {
    const parts = chunk.candidates?.[0]?.content?.parts;
    if (parts === undefined) {
      continue;
    }
    const [firstPart] = parts;
    if (typeof firstPart.text === "string") {
      yield firstPart.text;
    }
  }
}
|
969
|
-
|
970
|
-
|
971
|
-
|
972
|
-
|
973
|
-
|
974
|
-
|
975
|
-
|
976
|
-
|
977
|
-
|
978
|
-
|
979
|
-
|
980
|
-
|
701
|
+
/** Adapts a Google Generative AI streaming response into an AI text stream. */
function GoogleGenerativeAIStream(response, cb) {
  const textStream = readableFromAsyncIterable(streamable3(response));
  return textStream
    .pipeThrough(createCallbacksTransformer(cb))
    .pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
}
|
704
|
+
|
705
|
+
// streams/huggingface-stream.ts
/**
 * Pulls tokens from a Hugging Face inference async iterator and exposes them
 * as a ReadableStream of text. Drops empty tokens, the final chunk that
 * repeats the whole `generated_text`, and end-of-sequence sentinel tokens.
 */
function createParser3(res) {
  const trimStartOfStream = trimStartOfStreamHelper();
  const endTokens = ["</s>", "<|endoftext|>", "<|end|>"];
  return new ReadableStream({
    async pull(controller) {
      const { value, done } = await res.next();
      if (done) {
        controller.close();
        return;
      }
      const text = trimStartOfStream(value.token?.text ?? "");
      // Nothing to emit for empty tokens.
      if (!text) {
        return;
      }
      // The last chunk carries the full generation; don't emit it again.
      if (value.generated_text != null && value.generated_text.length > 0) {
        return;
      }
      // Suppress model end-of-sequence markers.
      if (endTokens.includes(text)) {
        return;
      }
      controller.enqueue(text);
    }
  });
}
|
985
|
-
function
|
986
|
-
|
987
|
-
|
988
|
-
|
989
|
-
return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
|
990
|
-
createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
|
991
|
-
);
|
992
|
-
}
|
729
|
+
/** Adapts a Hugging Face token iterator into an AI text stream. */
function HuggingFaceStream(res, callbacks) {
  const tokenStream = createParser3(res);
  return tokenStream
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(
      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
    );
}
|
994
734
|
|
995
735
|
// streams/inkeep-stream.ts
|
@@ -1088,7 +828,325 @@ function LangChainStream(callbacks) {
|
|
1088
828
|
await handleError(e, runId);
|
1089
829
|
}
|
1090
830
|
}
|
1091
|
-
};
|
831
|
+
};
|
832
|
+
}
|
833
|
+
|
834
|
+
// streams/mistral-stream.ts
/**
 * Yields the delta content of the first choice from each Mistral chunk,
 * skipping chunks whose content is undefined or the empty string.
 */
async function* streamable4(stream) {
  for await (const chunk of stream) {
    const content = chunk.choices[0]?.delta?.content;
    if (content !== undefined && content !== "") {
      yield content;
    }
  }
}
|
845
|
+
/** Adapts a Mistral chat completion stream into an AI text stream. */
function MistralStream(response, callbacks) {
  return readableFromAsyncIterable(streamable4(response))
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(
      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
    );
}
|
851
|
+
|
852
|
+
// streams/openai-stream.ts
/** Returns an SSE data parser that JSON-decodes each event and extracts its text. */
function parseOpenAIStream() {
  const extract = chunkToText();
  return function parse(data) {
    const json = JSON.parse(data);
    return extract(json);
  };
}
|
857
|
+
// Yields text extracted from OpenAI-compatible chunks. Azure SDK chunks
// (detected by `promptFilterResults`) are first remapped from camelCase to
// the OpenAI wire shape so chunkToText can handle both uniformly.
async function* streamable5(stream) {
  const extract = chunkToText();
  for await (let chunk of stream) {
    if ("promptFilterResults" in chunk) {
      // Azure chunk: rebuild it in the snake_case OpenAI format.
      chunk = {
        id: chunk.id,
        // NOTE(review): `created` is a Date here and getDate() returns the
        // day-of-month, not a Unix timestamp — looks suspicious; confirm
        // against the Azure SDK's `created` field semantics.
        created: chunk.created.getDate(),
        object: chunk.object,
        // not exposed by Azure API
        model: chunk.model,
        // not exposed by Azure API
        choices: chunk.choices.map((choice) => {
          var _a, _b, _c, _d, _e, _f, _g;
          return {
            delta: {
              content: (_a = choice.delta) == null ? void 0 : _a.content,
              function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
              role: (_c = choice.delta) == null ? void 0 : _c.role,
              // Re-index tool calls; Azure's toolCalls lack the `index` field.
              tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
                index,
                id: toolCall.id,
                function: toolCall.function,
                type: toolCall.type
              })) : void 0
            },
            finish_reason: choice.finishReason,
            index: choice.index
          };
        })
      };
    }
    const text = extract(chunk);
    if (text)
      yield text;
  }
}
|
893
|
+
// Returns a stateful extractor that turns one OpenAI chunk into a text
// fragment. For function/tool calls it emits partial JSON strings across
// successive chunks (opening the object on the first name, streaming escaped
// argument fragments, and closing on the finish_reason), so the concatenated
// output parses as a single JSON document.
function chunkToText() {
  const trimStartOfStream = trimStartOfStreamHelper();
  // Tracks whether we are mid-way through streaming a function/tool call.
  let isFunctionStreamingIn;
  return (json) => {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
    if (isChatCompletionChunk(json)) {
      const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
      if ((_b = delta.function_call) == null ? void 0 : _b.name) {
        // First function_call chunk: open the JSON envelope.
        isFunctionStreamingIn = true;
        return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
      } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
        // First chunk of a tool call: open the array (index 0) or close the
        // previous entry and open the next one.
        isFunctionStreamingIn = true;
        const toolCall = delta.tool_calls[0];
        if (toolCall.index === 0) {
          return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
        } else {
          return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
        }
      } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
        // Argument fragment: escape it so it embeds in the JSON string above.
        return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
      } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
        return cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments);
      } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
        // End of a function_call: close arguments string and both objects.
        isFunctionStreamingIn = false;
        return '"}}';
      } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
        // End of a tool_calls sequence: also close the array and envelope.
        isFunctionStreamingIn = false;
        return '"}}]}';
      }
    }
    // Plain text path: chat delta content or legacy completion text.
    const text = trimStartOfStream(
      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
    );
    return text;
  };
  // Escapes a raw argument fragment for embedding inside a JSON string literal.
  function cleanupArguments(argumentChunk) {
    let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
    return `${escapedPartialJson}`;
  }
}
|
933
|
+
// Internal symbol used to thread the accumulated function-call message
// history through recursive OpenAIStream invocations without exposing it
// on the public callbacks object.
var __internal__OpenAIFnMessagesSymbol = Symbol(
  "internal_openai_fn_messages"
);
|
936
|
+
/** Truthy when `data` looks like a chat completion chunk (first choice has a `delta`). */
function isChatCompletionChunk(data) {
  const first = "choices" in data && data.choices && data.choices[0];
  return first && "delta" in first;
}
|
939
|
+
/** Truthy when `data` looks like a legacy completion (first choice has a `text`). */
function isCompletion(data) {
  const first = "choices" in data && data.choices && data.choices[0];
  return first && "text" in first;
}
|
942
|
+
// Adapts an OpenAI response (SDK async iterable or raw fetch Response) into
// an AI text stream. When function/tool-call callbacks are configured,
// `onFinal` is suppressed on the inner transformer so it fires only once,
// after the function-call transformer has finished.
function OpenAIStream(res, callbacks) {
  const cb = callbacks;
  let stream;
  if (Symbol.asyncIterator in res) {
    // SDK stream path: normalize chunks (incl. Azure) via streamable5.
    stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
      createCallbacksTransformer(
        (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
          ...cb,
          onFinal: void 0
        } : {
          ...cb
        }
      )
    );
  } else {
    // Raw Response path: parse SSE events with the OpenAI parser.
    stream = AIStream(
      res,
      parseOpenAIStream(),
      (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
        ...cb,
        onFinal: void 0
      } : {
        ...cb
      }
    );
  }
  if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
    // Function/tool calling: route through the aggregating transformer.
    const functionCallTransformer = createFunctionCallTransformer(cb);
    return stream.pipeThrough(functionCallTransformer);
  } else {
    return stream.pipeThrough(
      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
    );
  }
}
|
977
|
+
// TransformStream that detects a streamed function/tool call (emitted as
// partial JSON by chunkToText), buffers it to completion, invokes the
// experimental callbacks in flush(), and — if the callback returns a new
// model response — recursively pipes that response through OpenAIStream.
// Plain text chunks pass through unchanged (or as "text" stream parts in
// complex mode).
function createFunctionCallTransformer(callbacks) {
  const textEncoder = new TextEncoder();
  let isFirstChunk = true;
  // Accumulates the partial-JSON function/tool call payload.
  let aggregatedResponse = "";
  // Accumulates everything seen, for the final onFinal callback.
  let aggregatedFinalCompletionResponse = "";
  let isFunctionStreamingIn = false;
  // Message history threaded in via the internal symbol on recursion.
  let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
  const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
  const decode = createChunkDecoder();
  return new TransformStream({
    async transform(chunk, controller) {
      const message = decode(chunk);
      aggregatedFinalCompletionResponse += message;
      // A function/tool call is recognized only by the very first chunk's prefix.
      const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
      if (shouldHandleAsFunction) {
        isFunctionStreamingIn = true;
        aggregatedResponse += message;
        isFirstChunk = false;
        return;
      }
      if (!isFunctionStreamingIn) {
        // Plain text: forward immediately.
        controller.enqueue(
          isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
        );
        return;
      } else {
        // Mid function call: keep buffering, emit nothing yet.
        aggregatedResponse += message;
      }
    },
    async flush(controller) {
      try {
        if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
          isFunctionStreamingIn = false;
          // The buffered partial JSON is now complete and parseable.
          const payload = JSON.parse(aggregatedResponse);
          let newFunctionCallMessages = [
            ...functionCallMessages
          ];
          let functionResponse = void 0;
          if (callbacks.experimental_onFunctionCall) {
            if (payload.function_call === void 0) {
              console.warn(
                "experimental_onFunctionCall should not be defined when using tools"
              );
            }
            const argumentsPayload = JSON.parse(
              payload.function_call.arguments
            );
            functionResponse = await callbacks.experimental_onFunctionCall(
              {
                name: payload.function_call.name,
                arguments: argumentsPayload
              },
              // createFunctionCallMessages helper handed to the user callback.
              (result) => {
                newFunctionCallMessages = [
                  ...functionCallMessages,
                  {
                    role: "assistant",
                    content: "",
                    function_call: payload.function_call
                  },
                  {
                    role: "function",
                    name: payload.function_call.name,
                    content: JSON.stringify(result)
                  }
                ];
                return newFunctionCallMessages;
              }
            );
          }
          if (callbacks.experimental_onToolCall) {
            const toolCalls = {
              tools: []
            };
            for (const tool of payload.tool_calls) {
              toolCalls.tools.push({
                id: tool.id,
                type: "function",
                func: {
                  name: tool.function.name,
                  arguments: tool.function.arguments
                }
              });
            }
            let responseIndex = 0;
            try {
              functionResponse = await callbacks.experimental_onToolCall(
                toolCalls,
                // appendToolCallMessage helper handed to the user callback.
                (result) => {
                  if (result) {
                    const { tool_call_id, function_name, tool_call_result } = result;
                    newFunctionCallMessages = [
                      ...newFunctionCallMessages,
                      // Only append the assistant message if it's the first response
                      ...responseIndex === 0 ? [
                        {
                          role: "assistant",
                          content: "",
                          tool_calls: payload.tool_calls.map(
                            (tc) => ({
                              id: tc.id,
                              type: "function",
                              function: {
                                name: tc.function.name,
                                // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
                                arguments: JSON.stringify(
                                  tc.function.arguments
                                )
                              }
                            })
                          )
                        }
                      ] : [],
                      // Append the function call result message
                      {
                        role: "tool",
                        tool_call_id,
                        name: function_name,
                        content: JSON.stringify(tool_call_result)
                      }
                    ];
                    responseIndex++;
                  }
                  return newFunctionCallMessages;
                }
              );
            } catch (e) {
              console.error("Error calling experimental_onToolCall:", e);
            }
          }
          if (!functionResponse) {
            // No follow-up from the callback: surface the raw call payload.
            controller.enqueue(
              textEncoder.encode(
                isComplexMode ? formatStreamPart(
                  payload.function_call ? "function_call" : "tool_calls",
                  // parse to prevent double-encoding:
                  JSON.parse(aggregatedResponse)
                ) : aggregatedResponse
              )
            );
            return;
          } else if (typeof functionResponse === "string") {
            // Callback returned plain text: emit it as the final completion.
            controller.enqueue(
              isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
            );
            aggregatedFinalCompletionResponse = functionResponse;
            return;
          }
          // Callback returned a new model response: recurse through
          // OpenAIStream with the accumulated messages; suppress onStart
          // and onFinal on the nested stream.
          const filteredCallbacks = {
            ...callbacks,
            onStart: void 0
          };
          callbacks.onFinal = void 0;
          const openAIStream = OpenAIStream(functionResponse, {
            ...filteredCallbacks,
            [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
          });
          const reader = openAIStream.getReader();
          while (true) {
            const { done, value } = await reader.read();
            if (done) {
              break;
            }
            controller.enqueue(value);
          }
        }
      } finally {
        // Fire onFinal exactly once, at the outermost level.
        if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
          await callbacks.onFinal(aggregatedFinalCompletionResponse);
        }
      }
    }
  });
}
|
1093
1151
|
|
1094
1152
|
// streams/replicate-stream.ts
|
@@ -1113,79 +1171,6 @@ async function ReplicateStream(res, cb, options) {
|
|
1113
1171
|
);
|
1114
1172
|
}
|
1115
1173
|
|
1116
|
-
// streams/assistant-response.ts
|
1117
|
-
function experimental_AssistantResponse({ threadId, messageId }, process2) {
|
1118
|
-
const stream = new ReadableStream({
|
1119
|
-
async start(controller) {
|
1120
|
-
var _a;
|
1121
|
-
const textEncoder = new TextEncoder();
|
1122
|
-
const sendMessage = (message) => {
|
1123
|
-
controller.enqueue(
|
1124
|
-
textEncoder.encode(formatStreamPart("assistant_message", message))
|
1125
|
-
);
|
1126
|
-
};
|
1127
|
-
const sendDataMessage = (message) => {
|
1128
|
-
controller.enqueue(
|
1129
|
-
textEncoder.encode(formatStreamPart("data_message", message))
|
1130
|
-
);
|
1131
|
-
};
|
1132
|
-
const sendError = (errorMessage) => {
|
1133
|
-
controller.enqueue(
|
1134
|
-
textEncoder.encode(formatStreamPart("error", errorMessage))
|
1135
|
-
);
|
1136
|
-
};
|
1137
|
-
controller.enqueue(
|
1138
|
-
textEncoder.encode(
|
1139
|
-
formatStreamPart("assistant_control_data", {
|
1140
|
-
threadId,
|
1141
|
-
messageId
|
1142
|
-
})
|
1143
|
-
)
|
1144
|
-
);
|
1145
|
-
try {
|
1146
|
-
await process2({
|
1147
|
-
threadId,
|
1148
|
-
messageId,
|
1149
|
-
sendMessage,
|
1150
|
-
sendDataMessage
|
1151
|
-
});
|
1152
|
-
} catch (error) {
|
1153
|
-
sendError((_a = error.message) != null ? _a : `${error}`);
|
1154
|
-
} finally {
|
1155
|
-
controller.close();
|
1156
|
-
}
|
1157
|
-
},
|
1158
|
-
pull(controller) {
|
1159
|
-
},
|
1160
|
-
cancel() {
|
1161
|
-
}
|
1162
|
-
});
|
1163
|
-
return new Response(stream, {
|
1164
|
-
status: 200,
|
1165
|
-
headers: {
|
1166
|
-
"Content-Type": "text/plain; charset=utf-8"
|
1167
|
-
}
|
1168
|
-
});
|
1169
|
-
}
|
1170
|
-
|
1171
|
-
// streams/google-generative-ai-stream.ts
|
1172
|
-
async function* streamable4(response) {
|
1173
|
-
var _a, _b, _c;
|
1174
|
-
for await (const chunk of response.stream) {
|
1175
|
-
const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
|
1176
|
-
if (parts === void 0) {
|
1177
|
-
continue;
|
1178
|
-
}
|
1179
|
-
const firstPart = parts[0];
|
1180
|
-
if (typeof firstPart.text === "string") {
|
1181
|
-
yield firstPart.text;
|
1182
|
-
}
|
1183
|
-
}
|
1184
|
-
}
|
1185
|
-
function GoogleGenerativeAIStream(response, cb) {
|
1186
|
-
return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
|
1187
|
-
}
|
1188
|
-
|
1189
1174
|
// shared/read-data-stream.ts
|
1190
1175
|
var NEWLINE = "\n".charCodeAt(0);
|
1191
1176
|
function concatChunks(chunks, totalLength) {
|
@@ -1414,6 +1399,43 @@ var experimental_StreamingReactResponse = class {
|
|
1414
1399
|
return next;
|
1415
1400
|
}
|
1416
1401
|
};
|
1402
|
+
|
1403
|
+
// streams/streaming-text-response.ts
// HTTP Response wrapper for a text stream. Always responds 200 with
// text/plain; when experimental stream data is supplied, the payload is
// piped through its transform and the COMPLEX_HEADER flag tells the client
// to parse the stream-data protocol.
var StreamingTextResponse = class extends Response {
  constructor(res, init, data) {
    let processedStream = res;
    if (data) {
      processedStream = res.pipeThrough(data.stream);
    }
    super(processedStream, {
      ...init,
      // Status is forced to 200 even if init specifies otherwise.
      status: 200,
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        [COMPLEX_HEADER]: data ? "true" : "false",
        // Caller headers win over the defaults above.
        ...init == null ? void 0 : init.headers
      }
    });
  }
};
|
1421
|
+
/**
 * Pipes a web ReadableStream into a Node.js ServerResponse.
 * Writes the head (init.status or 200, text/plain) immediately, then
 * forwards chunks until the stream is exhausted and ends the response.
 */
function streamToResponse(res, response, init) {
  const status = (init == null ? void 0 : init.status) || 200;
  response.writeHead(status, {
    "Content-Type": "text/plain; charset=utf-8",
    ...init == null ? void 0 : init.headers
  });
  const reader = res.getReader();
  const pump = () => {
    reader.read().then(({ done, value }) => {
      if (done) {
        response.end();
      } else {
        response.write(value);
        pump();
      }
    });
  };
  pump();
}
|
1417
1439
|
// Annotate the CommonJS export names for ESM import in node:
|
1418
1440
|
0 && (module.exports = {
|
1419
1441
|
AIStream,
|
@@ -1428,6 +1450,7 @@ var experimental_StreamingReactResponse = class {
|
|
1428
1450
|
HuggingFaceStream,
|
1429
1451
|
InkeepStream,
|
1430
1452
|
LangChainStream,
|
1453
|
+
MistralStream,
|
1431
1454
|
OpenAIStream,
|
1432
1455
|
ReplicateStream,
|
1433
1456
|
StreamingTextResponse,
|