ai 2.2.1 → 2.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +214 -101
- package/dist/index.js +290 -92
- package/dist/index.mjs +283 -92
- package/package.json +1 -1
- package/react/dist/index.d.ts +2 -0
- package/react/dist/index.js +194 -52
- package/react/dist/index.mjs +194 -52
- package/solid/dist/index.js +34 -4
- package/solid/dist/index.mjs +34 -4
- package/svelte/dist/index.js +34 -4
- package/svelte/dist/index.mjs +34 -4
- package/vue/dist/index.js +34 -4
- package/vue/dist/index.mjs +34 -4
package/dist/index.js
CHANGED
@@ -22,15 +22,22 @@ var streams_exports = {};
 __export(streams_exports, {
   AIStream: () => AIStream,
   AnthropicStream: () => AnthropicStream,
+  COMPLEX_HEADER: () => COMPLEX_HEADER,
   CohereStream: () => CohereStream,
   HuggingFaceStream: () => HuggingFaceStream,
   LangChainStream: () => LangChainStream,
   OpenAIStream: () => OpenAIStream,
   ReplicateStream: () => ReplicateStream,
+  StreamStringPrefixes: () => StreamStringPrefixes,
   StreamingTextResponse: () => StreamingTextResponse,
   createCallbacksTransformer: () => createCallbacksTransformer,
   createChunkDecoder: () => createChunkDecoder,
   createEventStreamTransformer: () => createEventStreamTransformer,
+  createStreamDataTransformer: () => createStreamDataTransformer,
+  experimental_StreamData: () => experimental_StreamData,
+  getStreamString: () => getStreamString,
+  getStreamStringTypeAndValue: () => getStreamStringTypeAndValue,
+  isStreamStringEqualToType: () => isStreamStringEqualToType,
   nanoid: () => nanoid,
   readableFromAsyncIterable: () => readableFromAsyncIterable,
   streamToResponse: () => streamToResponse,
@@ -66,28 +73,36 @@ function createEventStreamTransformer(customParser) {
     }
   });
 }
-function createCallbacksTransformer(
+function createCallbacksTransformer(cb) {
   const textEncoder = new TextEncoder();
   let aggregatedResponse = "";
-  const 
+  const callbacks = cb || {};
   return new TransformStream({
     async start() {
-      if (onStart)
-        await onStart();
+      if (callbacks.onStart)
+        await callbacks.onStart();
     },
     async transform(message, controller) {
       controller.enqueue(textEncoder.encode(message));
-      if (onToken)
-        await onToken(message);
-      if (onCompletion)
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+      if (callbacks.onCompletion)
         aggregatedResponse += message;
     },
     async flush() {
-
-
+      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+      if (callbacks.onFinal && !isOpenAICallbacks) {
+        await callbacks.onFinal(aggregatedResponse);
+      }
     }
   });
 }
+function isOfTypeOpenAIStreamCallbacks(callbacks) {
+  return "experimental_onFunctionCall" in callbacks;
+}
 function trimStartOfStreamHelper() {
   let isStreamStart = true;
   return (text) => {
@@ -147,6 +162,140 @@ function readableFromAsyncIterable(iterable) {
   });
 }
 
+// shared/utils.ts
+var import_non_secure = require("nanoid/non-secure");
+var nanoid = (0, import_non_secure.customAlphabet)(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder(complex) {
+  const decoder = new TextDecoder();
+  if (!complex) {
+    return function(chunk) {
+      if (!chunk)
+        return "";
+      return decoder.decode(chunk, { stream: true });
+    };
+  }
+  return function(chunk) {
+    const decoded = decoder.decode(chunk, { stream: true }).split("\n");
+    return decoded.map(getStreamStringTypeAndValue).filter(Boolean);
+  };
+}
+var StreamStringPrefixes = {
+  text: 0,
+  function_call: 1,
+  data: 2
+  // user_err: 3?
+};
+var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+var getStreamString = (type, value) => `${StreamStringPrefixes[type]}:${JSON.stringify(value)}
+`;
+var getStreamStringTypeAndValue = (line) => {
+  const firstSeperatorIndex = line.indexOf(":");
+  const prefix = line.slice(0, firstSeperatorIndex);
+  const type = Object.keys(StreamStringPrefixes).find(
+    (key) => StreamStringPrefixes[key] === Number(prefix)
+  );
+  const val = line.slice(firstSeperatorIndex + 1);
+  let parsedVal = val;
+  if (!val) {
+    return { type, value: "" };
+  }
+  try {
+    parsedVal = JSON.parse(val);
+  } catch (e) {
+    console.error("Failed to parse JSON value:", val);
+  }
+  return { type, value: parsedVal };
+};
+var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
+// streams/stream-data.ts
+var experimental_StreamData = class {
+  constructor() {
+    this.encoder = new TextEncoder();
+    this.controller = null;
+    // closing the stream is synchronous, but we want to return a promise
+    // in case we're doing async work
+    this.isClosedPromise = null;
+    this.isClosedPromiseResolver = void 0;
+    this.isClosed = false;
+    // array to store appended data
+    this.data = [];
+    this.isClosedPromise = new Promise((resolve) => {
+      this.isClosedPromiseResolver = resolve;
+    });
+    const self = this;
+    this.stream = new TransformStream({
+      start: async (controller) => {
+        self.controller = controller;
+      },
+      transform: async (chunk, controller) => {
+        controller.enqueue(chunk);
+        if (self.data.length > 0) {
+          const encodedData = self.encoder.encode(
+            getStreamString("data", JSON.stringify(self.data))
+          );
+          self.data = [];
+          controller.enqueue(encodedData);
+        }
+      },
+      async flush(controller) {
+        const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
+          console.warn(
+            "The data stream is hanging. Did you forget to close it with `data.close()`?"
+          );
+        }, 3e3) : null;
+        await self.isClosedPromise;
+        if (warningTimeout !== null) {
+          clearTimeout(warningTimeout);
+        }
+        if (self.data.length) {
+          const encodedData = self.encoder.encode(
+            getStreamString("data", JSON.stringify(self.data))
+          );
+          controller.enqueue(encodedData);
+        }
+      }
+    });
+  }
+  async close() {
+    var _a;
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
+    this.isClosed = true;
+  }
+  append(value) {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    this.data.push(value);
+  }
+};
+function createStreamDataTransformer(experimental_streamData) {
+  if (!experimental_streamData) {
+    return new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue(chunk);
+      }
+    });
+  }
+  const encoder = new TextEncoder();
+  const decoder = new TextDecoder();
+  return new TransformStream({
+    transform: async (chunk, controller) => {
+      const message = decoder.decode(chunk);
+      controller.enqueue(encoder.encode(getStreamString("text", message)));
+    }
+  });
+}
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
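The helpers added in this hunk implement a small prefixed-line protocol for multiplexing text, function calls, and arbitrary data over one stream. A minimal sketch of how they behave, assuming only the code shown above (illustration, not part of the package diff):

```js
// Based on the shared/utils.ts additions shown in the hunk above.
// getStreamString prepends the numeric type code from StreamStringPrefixes,
// JSON-encodes the value, and terminates the line with a newline:
getStreamString("text", "Hello");          // -> '0:"Hello"\n'
getStreamString("data", [{ done: true }]); // -> '2:[{"done":true}]\n'

// getStreamStringTypeAndValue reverses the encoding, resolving the prefix
// back to its type name and JSON-parsing the payload:
getStreamStringTypeAndValue('0:"Hello"\n');
// -> { type: "text", value: "Hello" }
```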
@@ -166,11 +315,11 @@ function chunkToText() {
   const trimStartOfStream = trimStartOfStreamHelper();
   let isFunctionStreamingIn;
   return (json) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j
-    if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    if (isChatCompletionChunk(json) && ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name)) {
       isFunctionStreamingIn = true;
       return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
-    } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+    } else if (isChatCompletionChunk(json) && ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments)) {
       const argumentChunk = json.choices[0].delta.function_call.arguments;
       let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
       return `${escapedPartialJson}`;
@@ -179,38 +328,66 @@ function chunkToText() {
       return '"}}';
     }
     const text = trimStartOfStream(
-      (
+      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
     );
     return text;
   };
 }
 var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+function isChatCompletionChunk(data) {
+  return "choices" in data && "delta" in data.choices[0];
+}
+function isCompletion(data) {
+  return "choices" in data && "text" in data.choices[0];
+}
 function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
   if (Symbol.asyncIterator in res) {
     stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
-      createCallbacksTransformer(
+      createCallbacksTransformer(
+        (cb == null ? void 0 : cb.experimental_onFunctionCall) ? {
+          ...cb,
+          onFinal: void 0
+        } : {
+          ...cb
+        }
+      )
     );
   } else {
-    stream = AIStream(
+    stream = AIStream(
+      res,
+      parseOpenAIStream(),
+      (cb == null ? void 0 : cb.experimental_onFunctionCall) ? {
+        ...cb,
+        onFinal: void 0
+      } : {
+        ...cb
+      }
+    );
   }
   if (cb && cb.experimental_onFunctionCall) {
     const functionCallTransformer = createFunctionCallTransformer(cb);
     return stream.pipeThrough(functionCallTransformer);
   } else {
-    return stream
+    return stream.pipeThrough(
+      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    );
   }
 }
 function createFunctionCallTransformer(callbacks) {
   const textEncoder = new TextEncoder();
   let isFirstChunk = true;
   let aggregatedResponse = "";
+  let aggregatedFinalCompletionResponse = "";
   let isFunctionStreamingIn = false;
   let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+  const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
+  const decode = createChunkDecoder();
   return new TransformStream({
     async transform(chunk, controller) {
-      const message =
+      const message = decode(chunk);
+      aggregatedFinalCompletionResponse += message;
       const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
       if (shouldHandleAsFunction) {
         isFunctionStreamingIn = true;
@@ -219,64 +396,80 @@ function createFunctionCallTransformer(callbacks) {
         return;
       }
       if (!isFunctionStreamingIn) {
-        controller.enqueue(
+        controller.enqueue(
+          isComplexMode ? textEncoder.encode(getStreamString("text", message)) : chunk
+        );
         return;
       } else {
         aggregatedResponse += message;
       }
     },
     async flush(controller) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      try {
+        const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
+        if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
+          isFunctionStreamingIn = false;
+          const payload = JSON.parse(aggregatedResponse);
+          const argumentsPayload = JSON.parse(payload.function_call.arguments);
+          let newFunctionCallMessages = [
+            ...functionCallMessages
+          ];
+          const functionResponse = await callbacks.experimental_onFunctionCall(
+            {
+              name: payload.function_call.name,
+              arguments: argumentsPayload
+            },
+            (result) => {
+              newFunctionCallMessages = [
+                ...functionCallMessages,
+                {
+                  role: "assistant",
+                  content: "",
+                  function_call: payload.function_call
+                },
+                {
+                  role: "function",
+                  name: payload.function_call.name,
+                  content: JSON.stringify(result)
+                }
+              ];
+              return newFunctionCallMessages;
+            }
+          );
+          if (!functionResponse) {
+            controller.enqueue(
+              textEncoder.encode(
+                isComplexMode ? getStreamString("function_call", aggregatedResponse) : aggregatedResponse
+              )
+            );
+            return;
+          } else if (typeof functionResponse === "string") {
+            controller.enqueue(
+              isComplexMode ? textEncoder.encode(getStreamString("text", functionResponse)) : textEncoder.encode(functionResponse)
+            );
+            return;
           }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        });
-        const reader = openAIStream.getReader();
-        while (true) {
-          const { done, value } = await reader.read();
-          if (done) {
-            break;
+          const filteredCallbacks = {
+            ...callbacks,
+            onStart: void 0
+          };
+          callbacks.onFinal = void 0;
+          const openAIStream = OpenAIStream(functionResponse, {
+            ...filteredCallbacks,
+            [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+          });
+          const reader = openAIStream.getReader();
+          while (true) {
+            const { done, value } = await reader.read();
+            if (done) {
+              break;
+            }
+            controller.enqueue(value);
           }
-
+        }
+      } finally {
+        if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
+          await callbacks.onFinal(aggregatedFinalCompletionResponse);
        }
      }
    }
@@ -285,12 +478,17 @@ function createFunctionCallTransformer(callbacks) {
 
 // streams/streaming-text-response.ts
 var StreamingTextResponse = class extends Response {
-  constructor(res, init) {
-
+  constructor(res, init, data) {
+    let processedStream = res;
+    if (data) {
+      processedStream = res.pipeThrough(data.stream);
+    }
+    super(processedStream, {
       ...init,
       status: 200,
       headers: {
         "Content-Type": "text/plain; charset=utf-8",
+        [COMPLEX_HEADER]: data ? "true" : "false",
         ...init == null ? void 0 : init.headers
       }
     });
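The new third constructor argument ties `StreamingTextResponse` to `experimental_StreamData`: when a data instance is passed, the response body is piped through `data.stream` and the `X-Experimental-Stream-Data` header is set to `"true"`. A minimal sketch of how the pieces might be wired together, assuming a hypothetical route handler and an OpenAI streaming response (the handler shape and `getOpenAIResponse` helper are illustrative, not taken from the diff):

```js
import { OpenAIStream, StreamingTextResponse, experimental_StreamData } from "ai";

// Hypothetical route handler; getOpenAIResponse stands in for whatever
// produces the OpenAI API streaming response in your app.
export async function POST(req) {
  const openaiResponse = await getOpenAIResponse(req); // assumed helper

  const data = new experimental_StreamData();
  const stream = OpenAIStream(openaiResponse, {
    experimental_streamData: true, // emit the prefixed "complex" stream format
    onFinal() {
      data.append({ status: "done" }); // arbitrary JSON sent alongside the text
      data.close();                    // resolves the pending flush() in data.stream
    },
  });

  // The constructor change shown above pipes through data.stream and sets COMPLEX_HEADER.
  return new StreamingTextResponse(stream, {}, data);
}
```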
@@ -342,7 +540,9 @@ function createParser2(res) {
   });
 }
 function HuggingFaceStream(res, callbacks) {
-  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks))
+  return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
 }
 
 // streams/cohere-stream.ts
@@ -389,7 +589,9 @@ function createParser3(res) {
   });
 }
 function CohereStream(reader, callbacks) {
-  return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks))
+  return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
 }
 
 // streams/anthropic-stream.ts
@@ -421,11 +623,11 @@ async function* streamable2(stream) {
 }
 function AnthropicStream(res, cb) {
   if (Symbol.asyncIterator in res) {
-    return readableFromAsyncIterable(streamable2(res)).pipeThrough(
-      createCallbacksTransformer(cb)
-    );
+    return readableFromAsyncIterable(streamable2(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
   } else {
-    return AIStream(res, parseAnthropicStream(), cb)
+    return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
+      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    );
   }
 }
 
@@ -450,7 +652,9 @@ function LangChainStream(callbacks) {
     }
   };
   return {
-    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks))
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    ),
     handlers: {
       handleLLMNewToken: async (token) => {
         await writer.ready;
@@ -503,36 +707,30 @@ async function ReplicateStream(res, cb) {
       Accept: "text/event-stream"
     }
   });
-  return AIStream(eventStream, void 0, cb)
-
-
-// shared/utils.ts
-var import_non_secure = require("nanoid/non-secure");
-var nanoid = (0, import_non_secure.customAlphabet)(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-function createChunkDecoder() {
-  const decoder = new TextDecoder();
-  return function(chunk) {
-    if (!chunk)
-      return "";
-    return decoder.decode(chunk, { stream: true });
-  };
+  return AIStream(eventStream, void 0, cb).pipeThrough(
+    createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+  );
 }
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AIStream,
   AnthropicStream,
+  COMPLEX_HEADER,
   CohereStream,
   HuggingFaceStream,
   LangChainStream,
   OpenAIStream,
   ReplicateStream,
+  StreamStringPrefixes,
   StreamingTextResponse,
   createCallbacksTransformer,
   createChunkDecoder,
   createEventStreamTransformer,
+  createStreamDataTransformer,
+  experimental_StreamData,
+  getStreamString,
+  getStreamStringTypeAndValue,
+  isStreamStringEqualToType,
   nanoid,
   readableFromAsyncIterable,
   streamToResponse,