ai 2.2.33 → 2.2.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +169 -154
- package/dist/index.js +567 -494
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +568 -496
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -1
- package/react/dist/index.js +41 -22
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +41 -22
- package/react/dist/index.mjs.map +1 -1
- package/solid/dist/index.js +41 -22
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +41 -22
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.js +41 -22
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +41 -22
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.js +41 -22
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +41 -22
- package/vue/dist/index.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -1,5 +1,122 @@
-//
-import {
+// streams/ai-stream.ts
+import {
+  createParser
+} from "eventsource-parser";
+function createEventStreamTransformer(customParser) {
+  const textDecoder = new TextDecoder();
+  let eventSourceParser;
+  return new TransformStream({
+    async start(controller) {
+      eventSourceParser = createParser(
+        (event) => {
+          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+          // @see https://replicate.com/docs/streaming
+          event.event === "done") {
+            controller.terminate();
+            return;
+          }
+          if ("data" in event) {
+            const parsedMessage = customParser ? customParser(event.data, {
+              event: event.event
+            }) : event.data;
+            if (parsedMessage)
+              controller.enqueue(parsedMessage);
+          }
+        }
+      );
+    },
+    transform(chunk) {
+      eventSourceParser.feed(textDecoder.decode(chunk));
+    }
+  });
+}
+function createCallbacksTransformer(cb) {
+  const textEncoder = new TextEncoder();
+  let aggregatedResponse = "";
+  const callbacks = cb || {};
+  return new TransformStream({
+    async start() {
+      if (callbacks.onStart)
+        await callbacks.onStart();
+    },
+    async transform(message, controller) {
+      controller.enqueue(textEncoder.encode(message));
+      aggregatedResponse += message;
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+    },
+    async flush() {
+      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+      if (callbacks.onFinal && !isOpenAICallbacks) {
+        await callbacks.onFinal(aggregatedResponse);
+      }
+    }
+  });
+}
+function isOfTypeOpenAIStreamCallbacks(callbacks) {
+  return "experimental_onFunctionCall" in callbacks;
+}
+function trimStartOfStreamHelper() {
+  let isStreamStart = true;
+  return (text) => {
+    if (isStreamStart) {
+      text = text.trimStart();
+      if (text)
+        isStreamStart = false;
+    }
+    return text;
+  };
+}
+function AIStream(response, customParser, callbacks) {
+  if (!response.ok) {
+    if (response.body) {
+      const reader = response.body.getReader();
+      return new ReadableStream({
+        async start(controller) {
+          const { done, value } = await reader.read();
+          if (!done) {
+            const errorText = new TextDecoder().decode(value);
+            controller.error(new Error(`Response error: ${errorText}`));
+          }
+        }
+      });
+    } else {
+      return new ReadableStream({
+        start(controller) {
+          controller.error(new Error("Response error: No response body"));
+        }
+      });
+    }
+  }
+  const responseBodyStream = response.body || createEmptyReadableStream();
+  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
+}
+function createEmptyReadableStream() {
+  return new ReadableStream({
+    start(controller) {
+      controller.close();
+    }
+  });
+}
+function readableFromAsyncIterable(iterable) {
+  let it = iterable[Symbol.asyncIterator]();
+  return new ReadableStream({
+    async pull(controller) {
+      const { done, value } = await it.next();
+      if (done)
+        controller.close();
+      else
+        controller.enqueue(value);
+    },
+    async cancel(reason) {
+      var _a;
+      await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
+    }
+  });
+}
 
 // shared/stream-parts.ts
 var textStreamPart = {
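
The hunk above moves the `streams/ai-stream.ts` helpers to the top of the bundle without changing their behavior: `AIStream` takes a fetch `Response`, an optional custom parser applied to each SSE `data` payload, and lifecycle callbacks (`onStart`, `onToken`, `onCompletion`, `onFinal`). A minimal consumption sketch follows; the upstream URL and JSON event shape are invented for illustration, while the signatures match the code above.

```js
import { AIStream, StreamingTextResponse } from "ai";

export async function POST() {
  // Hypothetical upstream SSE endpoint emitting events like: data: {"text":"..."}
  const upstream = await fetch("https://example.com/v1/generate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt: "Hello", stream: true }),
  });
  const stream = AIStream(
    upstream,
    // Custom parser: receives each SSE data payload plus { event }.
    // Returning undefined drops the chunk; returning a string enqueues it.
    (data) => {
      const json = JSON.parse(data); // assumed event shape
      return json.text;
    },
    {
      onStart: async () => console.log("stream started"),
      onToken: async (token) => console.log("token:", token),
      onFinal: async (completion) => console.log("done, length:", completion.length),
    }
  );
  return new StreamingTextResponse(stream);
}
```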
@@ -181,148 +298,6 @@ function formatStreamPart(type, value) {
 `;
 }
 
-// shared/utils.ts
-var nanoid = customAlphabet(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-function createChunkDecoder(complex) {
-  const decoder = new TextDecoder();
-  if (!complex) {
-    return function(chunk) {
-      if (!chunk)
-        return "";
-      return decoder.decode(chunk, { stream: true });
-    };
-  }
-  return function(chunk) {
-    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
-    return decoded.map(parseStreamPart).filter(Boolean);
-  };
-}
-var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
-var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
-// streams/ai-stream.ts
-import {
-  createParser
-} from "eventsource-parser";
-function createEventStreamTransformer(customParser) {
-  const textDecoder = new TextDecoder();
-  let eventSourceParser;
-  return new TransformStream({
-    async start(controller) {
-      eventSourceParser = createParser(
-        (event) => {
-          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
-          // @see https://replicate.com/docs/streaming
-          event.event === "done") {
-            controller.terminate();
-            return;
-          }
-          if ("data" in event) {
-            const parsedMessage = customParser ? customParser(event.data, {
-              event: event.event
-            }) : event.data;
-            if (parsedMessage)
-              controller.enqueue(parsedMessage);
-          }
-        }
-      );
-    },
-    transform(chunk) {
-      eventSourceParser.feed(textDecoder.decode(chunk));
-    }
-  });
-}
-function createCallbacksTransformer(cb) {
-  const textEncoder = new TextEncoder();
-  let aggregatedResponse = "";
-  const callbacks = cb || {};
-  return new TransformStream({
-    async start() {
-      if (callbacks.onStart)
-        await callbacks.onStart();
-    },
-    async transform(message, controller) {
-      controller.enqueue(textEncoder.encode(message));
-      aggregatedResponse += message;
-      if (callbacks.onToken)
-        await callbacks.onToken(message);
-    },
-    async flush() {
-      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
-      if (callbacks.onCompletion) {
-        await callbacks.onCompletion(aggregatedResponse);
-      }
-      if (callbacks.onFinal && !isOpenAICallbacks) {
-        await callbacks.onFinal(aggregatedResponse);
-      }
-    }
-  });
-}
-function isOfTypeOpenAIStreamCallbacks(callbacks) {
-  return "experimental_onFunctionCall" in callbacks;
-}
-function trimStartOfStreamHelper() {
-  let isStreamStart = true;
-  return (text) => {
-    if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
-        isStreamStart = false;
-    }
-    return text;
-  };
-}
-function AIStream(response, customParser, callbacks) {
-  if (!response.ok) {
-    if (response.body) {
-      const reader = response.body.getReader();
-      return new ReadableStream({
-        async start(controller) {
-          const { done, value } = await reader.read();
-          if (!done) {
-            const errorText = new TextDecoder().decode(value);
-            controller.error(new Error(`Response error: ${errorText}`));
-          }
-        }
-      });
-    } else {
-      return new ReadableStream({
-        start(controller) {
-          controller.error(new Error("Response error: No response body"));
-        }
-      });
-    }
-  }
-  const responseBodyStream = response.body || createEmptyReadableStream();
-  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
-}
-function createEmptyReadableStream() {
-  return new ReadableStream({
-    start(controller) {
-      controller.close();
-    }
-  });
-}
-function readableFromAsyncIterable(iterable) {
-  let it = iterable[Symbol.asyncIterator]();
-  return new ReadableStream({
-    async pull(controller) {
-      const { done, value } = await it.next();
-      if (done)
-        controller.close();
-      else
-        controller.enqueue(value);
-    },
-    async cancel(reason) {
-      var _a;
-      await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
-    }
-  });
-}
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
   constructor() {
@@ -353,10 +328,11 @@ var experimental_StreamData = class {
           controller.enqueue(encodedData);
         }
         if (self.messageAnnotations.length) {
-          const
+          const encodedMessageAnnotations = self.encoder.encode(
            formatStreamPart("message_annotations", self.messageAnnotations)
           );
-
+          self.messageAnnotations = [];
+          controller.enqueue(encodedMessageAnnotations);
         }
         controller.enqueue(chunk);
       },
@@ -424,314 +400,80 @@ function createStreamDataTransformer(experimental_streamData) {
       const message = decoder.decode(chunk);
       controller.enqueue(encoder.encode(formatStreamPart("text", message)));
     }
-  });
-}
-
-// streams/anthropic-stream.ts
-function parseAnthropicStream() {
-  let previous = "";
-  return (data) => {
-    const json = JSON.parse(data);
-    if ("error" in json) {
-      throw new Error(`${json.error.type}: ${json.error.message}`);
-    }
-    if (!("completion" in json)) {
-      return;
-    }
-    const text = json.completion;
-    if (!previous || text.length > previous.length && text.startsWith(previous)) {
-      const delta = text.slice(previous.length);
-      previous = text;
-      return delta;
-    }
-    return text;
-  };
-}
-async function* streamable(stream) {
-  for await (const chunk of stream) {
-    if ("completion" in chunk) {
-      const text = chunk.completion;
-      if (text)
-        yield text;
-    } else if ("delta" in chunk) {
-      const { delta } = chunk;
-      if ("text" in delta) {
-        const text = delta.text;
-        if (text)
-          yield text;
-      }
-    }
-  }
-}
-function AnthropicStream(res, cb) {
-  if (Symbol.asyncIterator in res) {
-    return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
-  } else {
-    return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
-      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
-    );
-  }
-}
-
-// streams/assistant-response.ts
-function experimental_AssistantResponse({ threadId, messageId }, process2) {
-  const stream = new ReadableStream({
-    async start(controller) {
-      var _a;
-      const textEncoder = new TextEncoder();
-      const sendMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("assistant_message", message))
-        );
-      };
-      const sendDataMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("data_message", message))
-        );
-      };
-      const sendError = (errorMessage) => {
-        controller.enqueue(
-          textEncoder.encode(formatStreamPart("error", errorMessage))
-        );
-      };
-      controller.enqueue(
-        textEncoder.encode(
-          formatStreamPart("assistant_control_data", {
-            threadId,
-            messageId
-          })
-        )
-      );
-      try {
-        await process2({
-          threadId,
-          messageId,
-          sendMessage,
-          sendDataMessage
-        });
-      } catch (error) {
-        sendError((_a = error.message) != null ? _a : `${error}`);
-      } finally {
-        controller.close();
-      }
-    },
-    pull(controller) {
-    },
-    cancel() {
-    }
-  });
-  return new Response(stream, {
-    status: 200,
-    headers: {
-      "Content-Type": "text/plain; charset=utf-8"
-    }
-  });
-}
-
-// streams/aws-bedrock-stream.ts
-async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
-  var _a, _b;
-  const decoder = new TextDecoder();
-  for await (const chunk of (_a = response.body) != null ? _a : []) {
-    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
-    if (bytes != null) {
-      const chunkText = decoder.decode(bytes);
-      const chunkJSON = JSON.parse(chunkText);
-      const delta = extractTextDeltaFromChunk(chunkJSON);
-      if (delta != null) {
-        yield delta;
-      }
-    }
-  }
-}
-function AWSBedrockAnthropicStream(response, callbacks) {
-  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
-}
-function AWSBedrockCohereStream(response, callbacks) {
-  return AWSBedrockStream(
-    response,
-    callbacks,
-    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
-    // so we take the full generation:
-    (chunk) => {
-      var _a, _b;
-      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
-    }
-  );
-}
-function AWSBedrockLlama2Stream(response, callbacks) {
-  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
-}
-function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
-  return readableFromAsyncIterable(
-    asDeltaIterable(response, extractTextDeltaFromChunk)
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
-}
-
-// streams/cohere-stream.ts
-var utf8Decoder = new TextDecoder("utf-8");
-async function processLines(lines, controller) {
-  for (const line of lines) {
-    const { text, is_finished } = JSON.parse(line);
-    if (!is_finished) {
-      controller.enqueue(text);
-    }
-  }
-}
-async function readAndProcessLines(reader, controller) {
-  let segment = "";
-  while (true) {
-    const { value: chunk, done } = await reader.read();
-    if (done) {
-      break;
-    }
-    segment += utf8Decoder.decode(chunk, { stream: true });
-    const linesArray = segment.split(/\r\n|\n|\r/g);
-    segment = linesArray.pop() || "";
-    await processLines(linesArray, controller);
-  }
-  if (segment) {
-    const linesArray = [segment];
-    await processLines(linesArray, controller);
-  }
-  controller.close();
-}
-function createParser2(res) {
-  var _a;
-  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
-  return new ReadableStream({
-    async start(controller) {
-      if (!reader) {
-        controller.close();
-        return;
-      }
-      await readAndProcessLines(reader, controller);
-    }
-  });
-}
-function CohereStream(reader, callbacks) {
-  return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
-}
-
-// streams/google-generative-ai-stream.ts
-async function* streamable2(response) {
-  var _a, _b, _c;
-  for await (const chunk of response.stream) {
-    const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
-    if (parts === void 0) {
-      continue;
-    }
-    const firstPart = parts[0];
-    if (typeof firstPart.text === "string") {
-      yield firstPart.text;
+  });
+}
+
+// streams/aws-bedrock-stream.ts
+async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+  var _a, _b;
+  const decoder = new TextDecoder();
+  for await (const chunk of (_a = response.body) != null ? _a : []) {
+    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+    if (bytes != null) {
+      const chunkText = decoder.decode(bytes);
+      const chunkJSON = JSON.parse(chunkText);
+      const delta = extractTextDeltaFromChunk(chunkJSON);
+      if (delta != null) {
+        yield delta;
+      }
     }
   }
 }
-function GoogleGenerativeAIStream(response, cb) {
-  return readableFromAsyncIterable(streamable2(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+function AWSBedrockAnthropicStream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
 }
-
-// streams/huggingface-stream.ts
-function createParser2(res) {
-  const trimStartOfStream = trimStartOfStreamHelper();
-  return new ReadableStream({
-    async pull(controller) {
+function AWSBedrockCohereStream(response, callbacks) {
+  return AWSBedrockStream(
+    response,
+    callbacks,
+    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+    // so we take the full generation:
+    (chunk) => {
       var _a, _b;
-      const { value, done } = await res.next();
-      if (done) {
-        controller.close();
-        return;
-      }
-      const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
-      if (!text)
-        return;
-      if (value.generated_text != null && value.generated_text.length > 0) {
-        return;
-      }
-      if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
-        return;
-      }
-      controller.enqueue(text);
+      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
     }
-  });
+  );
 }
-function HuggingFaceStream(res, callbacks) {
-  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+function AWSBedrockLlama2Stream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+}
+function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+  return readableFromAsyncIterable(
+    asDeltaIterable(response, extractTextDeltaFromChunk)
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
     createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
   );
 }
 
-// streams/langchain-stream.ts
-function LangChainStream(callbacks) {
-  const stream = new TransformStream();
-  const writer = stream.writable.getWriter();
-  const runs = /* @__PURE__ */ new Set();
-  const handleError = async (e, runId) => {
-    runs.delete(runId);
-    await writer.ready;
-    await writer.abort(e);
-  };
-  const handleStart = async (runId) => {
-    runs.add(runId);
-  };
-  const handleEnd = async (runId) => {
-    runs.delete(runId);
-    if (runs.size === 0) {
-      await writer.ready;
-      await writer.close();
-    }
-  };
-  return {
-    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    ),
-    writer,
-    handlers: {
-      handleLLMNewToken: async (token) => {
-        await writer.ready;
-        await writer.write(token);
-      },
-      handleLLMStart: async (_llm, _prompts, runId) => {
-        handleStart(runId);
-      },
-      handleLLMEnd: async (_output, runId) => {
-        await handleEnd(runId);
-      },
-      handleLLMError: async (e, runId) => {
-        await handleError(e, runId);
-      },
-      handleChainStart: async (_chain, _inputs, runId) => {
-        handleStart(runId);
-      },
-      handleChainEnd: async (_outputs, runId) => {
-        await handleEnd(runId);
-      },
-      handleChainError: async (e, runId) => {
-        await handleError(e, runId);
-      },
-      handleToolStart: async (_tool, _input, runId) => {
-        handleStart(runId);
-      },
-      handleToolEnd: async (_output, runId) => {
-        await handleEnd(runId);
-      },
-      handleToolError: async (e, runId) => {
-        await handleError(e, runId);
-      }
-    }
+// shared/utils.ts
+import { customAlphabet } from "nanoid/non-secure";
+var nanoid = customAlphabet(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder(complex) {
+  const decoder = new TextDecoder();
+  if (!complex) {
+    return function(chunk) {
+      if (!chunk)
+        return "";
+      return decoder.decode(chunk, { stream: true });
+    };
+  }
+  return function(chunk) {
+    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+    return decoded.map(parseStreamPart).filter(Boolean);
   };
 }
+var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+var COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
   return (data) => extract(JSON.parse(data));
 }
-async function* streamable3(stream) {
+async function* streamable(stream) {
   const extract = chunkToText();
   for await (let chunk of stream) {
     if ("promptFilterResults" in chunk) {
@@ -820,7 +562,7 @@ function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
   if (Symbol.asyncIterator in res) {
-    stream = readableFromAsyncIterable(streamable3(res)).pipeThrough(
+    stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
       createCallbacksTransformer(
         (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
           ...cb,
@@ -1022,7 +764,281 @@ function createFunctionCallTransformer(callbacks) {
         }
       }
     }
-  });
+  });
+}
+
+// streams/streaming-text-response.ts
+var StreamingTextResponse = class extends Response {
+  constructor(res, init, data) {
+    let processedStream = res;
+    if (data) {
+      processedStream = res.pipeThrough(data.stream);
+    }
+    super(processedStream, {
+      ...init,
+      status: 200,
+      headers: {
+        "Content-Type": "text/plain; charset=utf-8",
+        [COMPLEX_HEADER]: data ? "true" : "false",
+        ...init == null ? void 0 : init.headers
+      }
+    });
+  }
+};
+function streamToResponse(res, response, init) {
+  response.writeHead((init == null ? void 0 : init.status) || 200, {
+    "Content-Type": "text/plain; charset=utf-8",
+    ...init == null ? void 0 : init.headers
+  });
+  const reader = res.getReader();
+  function read() {
+    reader.read().then(({ done, value }) => {
+      if (done) {
+        response.end();
+        return;
+      }
+      response.write(value);
+      read();
+    });
+  }
+  read();
+}
+
+// streams/huggingface-stream.ts
+function createParser2(res) {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream({
+    async pull(controller) {
+      var _a, _b;
+      const { value, done } = await res.next();
+      if (done) {
+        controller.close();
+        return;
+      }
+      const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+      if (!text)
+        return;
+      if (value.generated_text != null && value.generated_text.length > 0) {
+        return;
+      }
+      if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+        return;
+      }
+      controller.enqueue(text);
+    }
+  });
+}
+function HuggingFaceStream(res, callbacks) {
+  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
+}
+
+// streams/cohere-stream.ts
+var utf8Decoder = new TextDecoder("utf-8");
+async function processLines(lines, controller) {
+  for (const line of lines) {
+    const { text, is_finished } = JSON.parse(line);
+    if (!is_finished) {
+      controller.enqueue(text);
+    }
+  }
+}
+async function readAndProcessLines(reader, controller) {
+  let segment = "";
+  while (true) {
+    const { value: chunk, done } = await reader.read();
+    if (done) {
+      break;
+    }
+    segment += utf8Decoder.decode(chunk, { stream: true });
+    const linesArray = segment.split(/\r\n|\n|\r/g);
+    segment = linesArray.pop() || "";
+    await processLines(linesArray, controller);
+  }
+  if (segment) {
+    const linesArray = [segment];
+    await processLines(linesArray, controller);
+  }
+  controller.close();
+}
+function createParser3(res) {
+  var _a;
+  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
+  return new ReadableStream({
+    async start(controller) {
+      if (!reader) {
+        controller.close();
+        return;
+      }
+      await readAndProcessLines(reader, controller);
+    }
+  });
+}
+async function* streamable2(stream) {
+  for await (const chunk of stream) {
+    if (chunk.eventType === "text-generation") {
+      const text = chunk.text;
+      if (text)
+        yield text;
+    }
+  }
+}
+function CohereStream(reader, callbacks) {
+  if (Symbol.asyncIterator in reader) {
+    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  } else {
+    return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  }
+}
+
+// streams/anthropic-stream.ts
+function parseAnthropicStream() {
+  let previous = "";
+  return (data) => {
+    const json = JSON.parse(data);
+    if ("error" in json) {
+      throw new Error(`${json.error.type}: ${json.error.message}`);
+    }
+    if (!("completion" in json)) {
+      return;
+    }
+    const text = json.completion;
+    if (!previous || text.length > previous.length && text.startsWith(previous)) {
+      const delta = text.slice(previous.length);
+      previous = text;
+      return delta;
+    }
+    return text;
+  };
+}
+async function* streamable3(stream) {
+  for await (const chunk of stream) {
+    if ("completion" in chunk) {
+      const text = chunk.completion;
+      if (text)
+        yield text;
+    } else if ("delta" in chunk) {
+      const { delta } = chunk;
+      if ("text" in delta) {
+        const text = delta.text;
+        if (text)
+          yield text;
+      }
+    }
+  }
+}
+function AnthropicStream(res, cb) {
+  if (Symbol.asyncIterator in res) {
+    return readableFromAsyncIterable(streamable3(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+  } else {
+    return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
+      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    );
+  }
+}
+
+// streams/inkeep-stream.ts
+function InkeepStream(res, callbacks) {
+  if (!res.body) {
+    throw new Error("Response body is null");
+  }
+  let chat_session_id = "";
+  let records_cited;
+  const inkeepEventParser = (data, options) => {
+    var _a, _b;
+    const { event } = options;
+    if (event === "records_cited") {
+      records_cited = JSON.parse(data);
+      (_a = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a.call(callbacks, records_cited);
+    }
+    if (event === "message_chunk") {
+      const inkeepMessageChunk = JSON.parse(data);
+      chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
+      return inkeepMessageChunk.content_chunk;
+    }
+    return;
+  };
+  let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
+  passThroughCallbacks = {
+    ...passThroughCallbacks,
+    onFinal: (completion) => {
+      var _a;
+      const inkeepOnFinalMetadata = {
+        chat_session_id,
+        records_cited
+      };
+      (_a = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a.call(callbacks, completion, inkeepOnFinalMetadata);
+    }
+  };
+  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
+    createStreamDataTransformer(passThroughCallbacks == null ? void 0 : passThroughCallbacks.experimental_streamData)
+  );
+}
+
+// streams/langchain-stream.ts
+function LangChainStream(callbacks) {
+  const stream = new TransformStream();
+  const writer = stream.writable.getWriter();
+  const runs = /* @__PURE__ */ new Set();
+  const handleError = async (e, runId) => {
+    runs.delete(runId);
+    await writer.ready;
+    await writer.abort(e);
+  };
+  const handleStart = async (runId) => {
+    runs.add(runId);
+  };
+  const handleEnd = async (runId) => {
+    runs.delete(runId);
+    if (runs.size === 0) {
+      await writer.ready;
+      await writer.close();
+    }
+  };
+  return {
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    ),
+    writer,
+    handlers: {
+      handleLLMNewToken: async (token) => {
+        await writer.ready;
+        await writer.write(token);
+      },
+      handleLLMStart: async (_llm, _prompts, runId) => {
+        handleStart(runId);
+      },
+      handleLLMEnd: async (_output, runId) => {
+        await handleEnd(runId);
+      },
+      handleLLMError: async (e, runId) => {
+        await handleError(e, runId);
+      },
+      handleChainStart: async (_chain, _inputs, runId) => {
+        handleStart(runId);
+      },
+      handleChainEnd: async (_outputs, runId) => {
+        await handleEnd(runId);
+      },
+      handleChainError: async (e, runId) => {
+        await handleError(e, runId);
+      },
+      handleToolStart: async (_tool, _input, runId) => {
+        handleStart(runId);
+      },
+      handleToolEnd: async (_output, runId) => {
+        await handleEnd(runId);
+      },
+      handleToolError: async (e, runId) => {
+        await handleError(e, runId);
+      }
+    }
+  };
 }
 
 // streams/replicate-stream.ts
@@ -1047,6 +1063,79 @@ async function ReplicateStream(res, cb, options) {
   );
 }
 
+// streams/assistant-response.ts
+function experimental_AssistantResponse({ threadId, messageId }, process2) {
+  const stream = new ReadableStream({
+    async start(controller) {
+      var _a;
+      const textEncoder = new TextEncoder();
+      const sendMessage = (message) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("assistant_message", message))
+        );
+      };
+      const sendDataMessage = (message) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("data_message", message))
+        );
+      };
+      const sendError = (errorMessage) => {
+        controller.enqueue(
+          textEncoder.encode(formatStreamPart("error", errorMessage))
+        );
+      };
+      controller.enqueue(
+        textEncoder.encode(
+          formatStreamPart("assistant_control_data", {
+            threadId,
+            messageId
+          })
+        )
+      );
+      try {
+        await process2({
+          threadId,
+          messageId,
+          sendMessage,
+          sendDataMessage
+        });
+      } catch (error) {
+        sendError((_a = error.message) != null ? _a : `${error}`);
+      } finally {
+        controller.close();
+      }
+    },
+    pull(controller) {
+    },
+    cancel() {
+    }
+  });
+  return new Response(stream, {
+    status: 200,
+    headers: {
+      "Content-Type": "text/plain; charset=utf-8"
+    }
+  });
+}
+
+// streams/google-generative-ai-stream.ts
+async function* streamable4(response) {
+  var _a, _b, _c;
+  for await (const chunk of response.stream) {
+    const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+    if (parts === void 0) {
+      continue;
+    }
+    const firstPart = parts[0];
+    if (typeof firstPart.text === "string") {
+      yield firstPart.text;
+    }
+  }
+}
+function GoogleGenerativeAIStream(response, cb) {
+  return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+}
+
 // shared/read-data-stream.ts
 var NEWLINE = "\n".charCodeAt(0);
 function concatChunks(chunks, totalLength) {
@@ -1091,6 +1180,11 @@ async function* readDataStream(reader, {
 }
 
 // shared/parse-complex-response.ts
+function assignAnnotationsToMessage(message, annotations) {
+  if (!message || !annotations || !annotations.length)
+    return message;
+  return { ...message, annotations: [...annotations] };
+}
 async function parseComplexResponse({
   reader,
   abortControllerRef,
@@ -1103,6 +1197,7 @@ async function parseComplexResponse({
   const prefixMap = {
     data: []
   };
+  let message_annotations = void 0;
   for await (const { type, value } of readDataStream(reader, {
     isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
   })) {
@@ -1121,22 +1216,6 @@ async function parseComplexResponse({
         };
       }
     }
-    if (type == "message_annotations") {
-      if (prefixMap["text"]) {
-        prefixMap["text"] = {
-          ...prefixMap["text"],
-          annotations: [...prefixMap["text"].annotations || [], ...value]
-        };
-      } else {
-        prefixMap["text"] = {
-          id: generateId(),
-          role: "assistant",
-          content: "",
-          annotations: [...value],
-          createdAt
-        };
-      }
-    }
     let functionCallMessage = null;
     if (type === "function_call") {
       prefixMap["function_call"] = {
@@ -1163,12 +1242,41 @@ async function parseComplexResponse({
     if (type === "data") {
       prefixMap["data"].push(...value);
     }
-
-
-
-
-
-
+    let responseMessage = prefixMap["text"];
+    if (type === "message_annotations") {
+      if (!message_annotations) {
+        message_annotations = [...value];
+      } else {
+        message_annotations.push(...value);
+      }
+      functionCallMessage = assignAnnotationsToMessage(
+        prefixMap["function_call"],
+        message_annotations
+      );
+      toolCallMessage = assignAnnotationsToMessage(
+        prefixMap["tool_calls"],
+        message_annotations
+      );
+      responseMessage = assignAnnotationsToMessage(
+        prefixMap["text"],
+        message_annotations
+      );
+    }
+    if (message_annotations == null ? void 0 : message_annotations.length) {
+      const messagePrefixKeys = [
+        "text",
+        "function_call",
+        "tool_calls"
+      ];
+      messagePrefixKeys.forEach((key) => {
+        if (prefixMap[key]) {
+          prefixMap[key].annotations = [...message_annotations];
+        }
+      });
+    }
+    const merged = [functionCallMessage, toolCallMessage, responseMessage].filter(Boolean).map((message) => ({
+      ...assignAnnotationsToMessage(message, message_annotations)
+    }));
     update(merged, [...prefixMap["data"]]);
   }
   onFinish == null ? void 0 : onFinish(prefixMap);
@@ -1256,43 +1364,6 @@ var experimental_StreamingReactResponse = class {
     return next;
   }
 };
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = res.pipeThrough(data.stream);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: {
-        "Content-Type": "text/plain; charset=utf-8",
-        [COMPLEX_HEADER]: data ? "true" : "false",
-        ...init == null ? void 0 : init.headers
-      }
-    });
-  }
-};
-function streamToResponse(res, response, init) {
-  response.writeHead((init == null ? void 0 : init.status) || 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  const reader = res.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
-}
 export {
   AIStream,
   AWSBedrockAnthropicStream,
@@ -1304,6 +1375,7 @@ export {
   CohereStream,
   GoogleGenerativeAIStream,
   HuggingFaceStream,
+  InkeepStream,
   LangChainStream,
   OpenAIStream,
   ReplicateStream,
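
`InkeepStream` is newly exported above. Per its implementation earlier in this diff, it expects an SSE `Response` whose events include `message_chunk` and `records_cited`, forwards citations to `onRecordsCited`, and calls a two-argument `onFinal(completion, metadata)`. A schematic sketch; the upstream URL and request payload are invented:

```js
import { InkeepStream, StreamingTextResponse } from "ai";

export async function POST(req) {
  const body = await req.json();
  // Hypothetical Inkeep-compatible SSE endpoint.
  const upstream = await fetch("https://example.com/inkeep/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  const stream = InkeepStream(upstream, {
    onRecordsCited: async (recordsCited) => {
      console.log("citations:", recordsCited);
    },
    onFinal: async (completion, metadata) => {
      console.log("chat_session_id:", metadata && metadata.chat_session_id);
    },
  });
  return new StreamingTextResponse(stream);
}
```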