ai 2.2.29 → 2.2.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -28,6 +28,7 @@ __export(streams_exports, {
  AnthropicStream: () => AnthropicStream,
  COMPLEX_HEADER: () => COMPLEX_HEADER,
  CohereStream: () => CohereStream,
+ GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
  HuggingFaceStream: () => HuggingFaceStream,
  LangChainStream: () => LangChainStream,
  OpenAIStream: () => OpenAIStream,
@@ -48,121 +49,8 @@ __export(streams_exports, {
  });
  module.exports = __toCommonJS(streams_exports);
 
- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createEventStreamTransformer(customParser) {
- const textDecoder = new TextDecoder();
- let eventSourceParser;
- return new TransformStream({
- async start(controller) {
- eventSourceParser = (0, import_eventsource_parser.createParser)(
- (event) => {
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
- // @see https://replicate.com/docs/streaming
- event.event === "done") {
- controller.terminate();
- return;
- }
- if ("data" in event) {
- const parsedMessage = customParser ? customParser(event.data) : event.data;
- if (parsedMessage)
- controller.enqueue(parsedMessage);
- }
- }
- );
- },
- transform(chunk) {
- eventSourceParser.feed(textDecoder.decode(chunk));
- }
- });
- }
- function createCallbacksTransformer(cb) {
- const textEncoder = new TextEncoder();
- let aggregatedResponse = "";
- const callbacks = cb || {};
- return new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
- async transform(message, controller) {
- controller.enqueue(textEncoder.encode(message));
- aggregatedResponse += message;
- if (callbacks.onToken)
- await callbacks.onToken(message);
- },
- async flush() {
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
- if (callbacks.onCompletion) {
- await callbacks.onCompletion(aggregatedResponse);
- }
- if (callbacks.onFinal && !isOpenAICallbacks) {
- await callbacks.onFinal(aggregatedResponse);
- }
- }
- });
- }
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
- return "experimental_onFunctionCall" in callbacks;
- }
- function trimStartOfStreamHelper() {
- let isStreamStart = true;
- return (text) => {
- if (isStreamStart) {
- text = text.trimStart();
- if (text)
- isStreamStart = false;
- }
- return text;
- };
- }
- function AIStream(response, customParser, callbacks) {
- if (!response.ok) {
- if (response.body) {
- const reader = response.body.getReader();
- return new ReadableStream({
- async start(controller) {
- const { done, value } = await reader.read();
- if (!done) {
- const errorText = new TextDecoder().decode(value);
- controller.error(new Error(`Response error: ${errorText}`));
- }
- }
- });
- } else {
- return new ReadableStream({
- start(controller) {
- controller.error(new Error("Response error: No response body"));
- }
- });
- }
- }
- const responseBodyStream = response.body || createEmptyReadableStream();
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
- function createEmptyReadableStream() {
- return new ReadableStream({
- start(controller) {
- controller.close();
- }
- });
- }
- function readableFromAsyncIterable(iterable) {
- let it = iterable[Symbol.asyncIterator]();
- return new ReadableStream({
- async pull(controller) {
- const { done, value } = await it.next();
- if (done)
- controller.close();
- else
- controller.enqueue(value);
- },
- async cancel(reason) {
- var _a;
- await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
- }
- });
- }
+ // shared/utils.ts
+ var import_non_secure = require("nanoid/non-secure");
 
  // shared/stream-parts.ts
  var textStreamPart = {
@@ -260,6 +148,23 @@ var dataMessageStreamPart = {
  };
  }
  };
+ var toolCallStreamPart = {
+ code: "7",
+ name: "tool_calls",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some((tc) => {
+ return tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string";
+ })) {
+ throw new Error(
+ '"tool_calls" parts expect an object with a ToolCallPayload.'
+ );
+ }
+ return {
+ type: "tool_calls",
+ value
+ };
+ }
+ };
  var streamParts = [
  textStreamPart,
  functionCallStreamPart,
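
Not itself part of the diff, but useful context for the new part: every stream part is serialized as `<code>:<JSON>` plus a trailing newline, so a payload that passes the validation above travels under prefix code 7. A minimal TypeScript sketch with hypothetical values:

// Hypothetical payload matching the shape toolCallStreamPart.parse accepts:
const toolCallsPayload = {
  tool_calls: [
    {
      id: "call_0", // provider-assigned tool call id
      type: "function",
      function: {
        name: "get_current_weather", // hypothetical tool name
        arguments: '{"location":"Berlin"}', // arguments travel as a JSON string
      },
    },
  ],
};

// What formatStreamPart("tool_calls", toolCallsPayload) would put on the wire:
const frame = `7:${JSON.stringify(toolCallsPayload)}\n`;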
@@ -267,7 +172,8 @@ var streamParts = [
  errorStreamPart,
  assistantMessageStreamPart,
  assistantControlDataStreamPart,
- dataMessageStreamPart
+ dataMessageStreamPart,
+ toolCallStreamPart
  ];
  var streamPartsByCode = {
  [textStreamPart.code]: textStreamPart,
@@ -276,7 +182,8 @@ var streamPartsByCode = {
  [errorStreamPart.code]: errorStreamPart,
  [assistantMessageStreamPart.code]: assistantMessageStreamPart,
  [assistantControlDataStreamPart.code]: assistantControlDataStreamPart,
- [dataMessageStreamPart.code]: dataMessageStreamPart
+ [dataMessageStreamPart.code]: dataMessageStreamPart,
+ [toolCallStreamPart.code]: toolCallStreamPart
  };
  var StreamStringPrefixes = {
  [textStreamPart.name]: textStreamPart.code,
@@ -285,7 +192,8 @@ var StreamStringPrefixes = {
  [errorStreamPart.name]: errorStreamPart.code,
  [assistantMessageStreamPart.name]: assistantMessageStreamPart.code,
  [assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,
- [dataMessageStreamPart.name]: dataMessageStreamPart.code
+ [dataMessageStreamPart.name]: dataMessageStreamPart.code,
+ [toolCallStreamPart.name]: toolCallStreamPart.code
  };
  var validCodes = streamParts.map((part) => part.code);
  var parseStreamPart = (line) => {
@@ -311,135 +219,7 @@ function formatStreamPart(type, value) {
  `;
  }
 
- // streams/stream-data.ts
- var experimental_StreamData = class {
- constructor() {
- this.encoder = new TextEncoder();
- this.controller = null;
- // closing the stream is synchronous, but we want to return a promise
- // in case we're doing async work
- this.isClosedPromise = null;
- this.isClosedPromiseResolver = void 0;
- this.isClosed = false;
- // array to store appended data
- this.data = [];
- this.isClosedPromise = new Promise((resolve) => {
- this.isClosedPromiseResolver = resolve;
- });
- const self = this;
- this.stream = new TransformStream({
- start: async (controller) => {
- self.controller = controller;
- },
- transform: async (chunk, controller) => {
- if (self.data.length > 0) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- self.data = [];
- controller.enqueue(encodedData);
- }
- controller.enqueue(chunk);
- },
- async flush(controller) {
- const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
- console.warn(
- "The data stream is hanging. Did you forget to close it with `data.close()`?"
- );
- }, 3e3) : null;
- await self.isClosedPromise;
- if (warningTimeout !== null) {
- clearTimeout(warningTimeout);
- }
- if (self.data.length) {
- const encodedData = self.encoder.encode(
- formatStreamPart("data", self.data)
- );
- controller.enqueue(encodedData);
- }
- }
- });
- }
- async close() {
- var _a;
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
- this.isClosed = true;
- }
- append(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- this.data.push(value);
- }
- };
- function createStreamDataTransformer(experimental_streamData) {
- if (!experimental_streamData) {
- return new TransformStream({
- transform: async (chunk, controller) => {
- controller.enqueue(chunk);
- }
- });
- }
- const encoder = new TextEncoder();
- const decoder = new TextDecoder();
- return new TransformStream({
- transform: async (chunk, controller) => {
- const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode(formatStreamPart("text", message)));
- }
- });
- }
-
- // streams/aws-bedrock-stream.ts
- async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a, _b;
- const decoder = new TextDecoder();
- for await (const chunk of (_a = response.body) != null ? _a : []) {
- const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
- if (bytes != null) {
- const chunkText = decoder.decode(bytes);
- const chunkJSON = JSON.parse(chunkText);
- const delta = extractTextDeltaFromChunk(chunkJSON);
- if (delta != null) {
- yield delta;
- }
- }
- }
- }
- function AWSBedrockAnthropicStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
- }
- function AWSBedrockCohereStream(response, callbacks) {
- return AWSBedrockStream(
- response,
- callbacks,
- // As of 2023-11-17, Bedrock does not support streaming for Cohere,
- // so we take the full generation:
- (chunk) => {
- var _a, _b;
- return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
- }
- );
- }
- function AWSBedrockLlama2Stream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
- }
- function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
- return readableFromAsyncIterable(
- asDeltaIterable(response, extractTextDeltaFromChunk)
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
- );
- }
-
  // shared/utils.ts
- var import_non_secure = require("nanoid/non-secure");
  var nanoid = (0, import_non_secure.customAlphabet)(
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  7
@@ -461,255 +241,346 @@ function createChunkDecoder(complex) {
  var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
- // streams/openai-stream.ts
- function parseOpenAIStream() {
- const extract = chunkToText();
- return (data) => {
- return extract(JSON.parse(data));
- };
+ // streams/ai-stream.ts
+ var import_eventsource_parser = require("eventsource-parser");
+ function createEventStreamTransformer(customParser) {
+ const textDecoder = new TextDecoder();
+ let eventSourceParser;
+ return new TransformStream({
+ async start(controller) {
+ eventSourceParser = (0, import_eventsource_parser.createParser)(
+ (event) => {
+ if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+ // @see https://replicate.com/docs/streaming
+ event.event === "done") {
+ controller.terminate();
+ return;
+ }
+ if ("data" in event) {
+ const parsedMessage = customParser ? customParser(event.data) : event.data;
+ if (parsedMessage)
+ controller.enqueue(parsedMessage);
+ }
+ }
+ );
+ },
+ transform(chunk) {
+ eventSourceParser.feed(textDecoder.decode(chunk));
+ }
+ });
  }
- async function* streamable(stream) {
- const extract = chunkToText();
- for await (const chunk of stream) {
- const text = extract(chunk);
- if (text)
- yield text;
- }
+ function createCallbacksTransformer(cb) {
+ const textEncoder = new TextEncoder();
+ let aggregatedResponse = "";
+ const callbacks = cb || {};
+ return new TransformStream({
+ async start() {
+ if (callbacks.onStart)
+ await callbacks.onStart();
+ },
+ async transform(message, controller) {
+ controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
+ if (callbacks.onToken)
+ await callbacks.onToken(message);
+ },
+ async flush() {
+ const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+ if (callbacks.onCompletion) {
+ await callbacks.onCompletion(aggregatedResponse);
+ }
+ if (callbacks.onFinal && !isOpenAICallbacks) {
+ await callbacks.onFinal(aggregatedResponse);
+ }
+ }
+ });
  }
- function chunkToText() {
- const trimStartOfStream = trimStartOfStreamHelper();
- let isFunctionStreamingIn;
- return (json) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
- if (isChatCompletionChunk(json) && ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name)) {
- isFunctionStreamingIn = true;
- return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
- } else if (isChatCompletionChunk(json) && ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments)) {
- const argumentChunk = json.choices[0].delta.function_call.arguments;
- let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
- return `${escapedPartialJson}`;
- } else if (isFunctionStreamingIn && (((_i = json.choices[0]) == null ? void 0 : _i.finish_reason) === "function_call" || ((_j = json.choices[0]) == null ? void 0 : _j.finish_reason) === "stop")) {
- isFunctionStreamingIn = false;
- return '"}}';
+ function isOfTypeOpenAIStreamCallbacks(callbacks) {
+ return "experimental_onFunctionCall" in callbacks;
+ }
+ function trimStartOfStreamHelper() {
+ let isStreamStart = true;
+ return (text) => {
+ if (isStreamStart) {
+ text = text.trimStart();
+ if (text)
+ isStreamStart = false;
  }
- const text = trimStartOfStream(
- isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
- );
  return text;
  };
  }
- var __internal__OpenAIFnMessagesSymbol = Symbol(
- "internal_openai_fn_messages"
- );
- function isChatCompletionChunk(data) {
- return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
+ function AIStream(response, customParser, callbacks) {
+ if (!response.ok) {
+ if (response.body) {
+ const reader = response.body.getReader();
+ return new ReadableStream({
+ async start(controller) {
+ const { done, value } = await reader.read();
+ if (!done) {
+ const errorText = new TextDecoder().decode(value);
+ controller.error(new Error(`Response error: ${errorText}`));
+ }
+ }
+ });
+ } else {
+ return new ReadableStream({
+ start(controller) {
+ controller.error(new Error("Response error: No response body"));
+ }
+ });
+ }
+ }
+ const responseBodyStream = response.body || createEmptyReadableStream();
+ return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
  }
- function isCompletion(data) {
- return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
+ function createEmptyReadableStream() {
+ return new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
  }
- function OpenAIStream(res, callbacks) {
- const cb = callbacks;
- let stream;
- if (Symbol.asyncIterator in res) {
- stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
- createCallbacksTransformer(
- (cb == null ? void 0 : cb.experimental_onFunctionCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
+ function readableFromAsyncIterable(iterable) {
+ let it = iterable[Symbol.asyncIterator]();
+ return new ReadableStream({
+ async pull(controller) {
+ const { done, value } = await it.next();
+ if (done)
+ controller.close();
+ else
+ controller.enqueue(value);
+ },
+ async cancel(reason) {
+ var _a;
+ await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
+ }
+ });
+ }
+
+ // streams/stream-data.ts
+ var experimental_StreamData = class {
+ constructor() {
+ this.encoder = new TextEncoder();
+ this.controller = null;
+ // closing the stream is synchronous, but we want to return a promise
+ // in case we're doing async work
+ this.isClosedPromise = null;
+ this.isClosedPromiseResolver = void 0;
+ this.isClosed = false;
+ // array to store appended data
+ this.data = [];
+ this.isClosedPromise = new Promise((resolve) => {
+ this.isClosedPromiseResolver = resolve;
+ });
+ const self = this;
+ this.stream = new TransformStream({
+ start: async (controller) => {
+ self.controller = controller;
+ },
+ transform: async (chunk, controller) => {
+ if (self.data.length > 0) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ self.data = [];
+ controller.enqueue(encodedData);
+ }
+ controller.enqueue(chunk);
+ },
+ async flush(controller) {
+ const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
+ console.warn(
+ "The data stream is hanging. Did you forget to close it with `data.close()`?"
+ );
+ }, 3e3) : null;
+ await self.isClosedPromise;
+ if (warningTimeout !== null) {
+ clearTimeout(warningTimeout);
+ }
+ if (self.data.length) {
+ const encodedData = self.encoder.encode(
+ formatStreamPart("data", self.data)
+ );
+ controller.enqueue(encodedData);
  }
- )
- );
- } else {
- stream = AIStream(
- res,
- parseOpenAIStream(),
- (cb == null ? void 0 : cb.experimental_onFunctionCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
  }
- );
+ });
  }
- if (cb && cb.experimental_onFunctionCall) {
- const functionCallTransformer = createFunctionCallTransformer(cb);
- return stream.pipeThrough(functionCallTransformer);
+ async close() {
+ var _a;
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
+ }
+ (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
+ this.isClosed = true;
+ }
+ append(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
+ }
+ this.data.push(value);
+ }
+ };
+ function createStreamDataTransformer(experimental_streamData) {
+ if (!experimental_streamData) {
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ controller.enqueue(chunk);
+ }
+ });
+ }
+ const encoder = new TextEncoder();
+ const decoder = new TextDecoder();
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ const message = decoder.decode(chunk);
+ controller.enqueue(encoder.encode(formatStreamPart("text", message)));
+ }
+ });
+ }
+
+ // streams/anthropic-stream.ts
+ function parseAnthropicStream() {
+ let previous = "";
+ return (data) => {
+ const json = JSON.parse(data);
+ if ("error" in json) {
+ throw new Error(`${json.error.type}: ${json.error.message}`);
+ }
+ if (!("completion" in json)) {
+ return;
+ }
+ const text = json.completion;
+ if (!previous || text.length > previous.length && text.startsWith(previous)) {
+ const delta = text.slice(previous.length);
+ previous = text;
+ return delta;
+ }
+ return text;
+ };
+ }
+ async function* streamable(stream) {
+ for await (const chunk of stream) {
+ if ("completion" in chunk) {
+ const text = chunk.completion;
+ if (text)
+ yield text;
+ } else if ("delta" in chunk) {
+ const { delta } = chunk;
+ if ("text" in delta) {
+ const text = delta.text;
+ if (text)
+ yield text;
+ }
+ }
+ }
+ }
+ function AnthropicStream(res, cb) {
+ if (Symbol.asyncIterator in res) {
+ return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
  } else {
- return stream.pipeThrough(
+ return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
  createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
  );
  }
  }
- function createFunctionCallTransformer(callbacks) {
- const textEncoder = new TextEncoder();
- let isFirstChunk = true;
- let aggregatedResponse = "";
- let aggregatedFinalCompletionResponse = "";
- let isFunctionStreamingIn = false;
- let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
- const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
- const decode = createChunkDecoder();
- return new TransformStream({
- async transform(chunk, controller) {
- const message = decode(chunk);
- aggregatedFinalCompletionResponse += message;
- const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
- if (shouldHandleAsFunction) {
- isFunctionStreamingIn = true;
- aggregatedResponse += message;
- isFirstChunk = false;
- return;
- }
- if (!isFunctionStreamingIn) {
+
+ // streams/assistant-response.ts
+ function experimental_AssistantResponse({ threadId, messageId }, process2) {
+ const stream = new ReadableStream({
+ async start(controller) {
+ var _a;
+ const textEncoder = new TextEncoder();
+ const sendMessage = (message) => {
  controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+ textEncoder.encode(formatStreamPart("assistant_message", message))
  );
- return;
- } else {
- aggregatedResponse += message;
- }
- },
- async flush(controller) {
+ };
+ const sendDataMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("data_message", message))
+ );
+ };
+ const sendError = (errorMessage) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("error", errorMessage))
+ );
+ };
+ controller.enqueue(
+ textEncoder.encode(
+ formatStreamPart("assistant_control_data", {
+ threadId,
+ messageId
+ })
+ )
+ );
  try {
- const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
- if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
- isFunctionStreamingIn = false;
- const payload = JSON.parse(aggregatedResponse);
- const argumentsPayload = JSON.parse(payload.function_call.arguments);
- let newFunctionCallMessages = [
- ...functionCallMessages
- ];
- const functionResponse = await callbacks.experimental_onFunctionCall(
- {
- name: payload.function_call.name,
- arguments: argumentsPayload
- },
- (result) => {
- newFunctionCallMessages = [
- ...functionCallMessages,
- {
- role: "assistant",
- content: "",
- function_call: payload.function_call
- },
- {
- role: "function",
- name: payload.function_call.name,
- content: JSON.stringify(result)
- }
- ];
- return newFunctionCallMessages;
- }
- );
- if (!functionResponse) {
- controller.enqueue(
- textEncoder.encode(
- isComplexMode ? formatStreamPart(
- "function_call",
- // parse to prevent double-encoding:
- JSON.parse(aggregatedResponse)
- ) : aggregatedResponse
- )
- );
- return;
- } else if (typeof functionResponse === "string") {
- controller.enqueue(
- isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
- );
- return;
- }
- const filteredCallbacks = {
- ...callbacks,
- onStart: void 0
- };
- callbacks.onFinal = void 0;
- const openAIStream = OpenAIStream(functionResponse, {
- ...filteredCallbacks,
- [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
- });
- const reader = openAIStream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
- controller.enqueue(value);
- }
- }
+ await process2({
+ threadId,
+ messageId,
+ sendMessage,
+ sendDataMessage
+ });
+ } catch (error) {
+ sendError((_a = error.message) != null ? _a : `${error}`);
  } finally {
- if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
- await callbacks.onFinal(aggregatedFinalCompletionResponse);
- }
+ controller.close();
  }
+ },
+ pull(controller) {
+ },
+ cancel() {
  }
  });
- }
-
- // streams/streaming-text-response.ts
- var StreamingTextResponse = class extends Response {
- constructor(res, init, data) {
- let processedStream = res;
- if (data) {
- processedStream = res.pipeThrough(data.stream);
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8"
  }
- super(processedStream, {
- ...init,
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8",
- [COMPLEX_HEADER]: data ? "true" : "false",
- ...init == null ? void 0 : init.headers
- }
- });
- }
- };
- function streamToResponse(res, response, init) {
- response.writeHead((init == null ? void 0 : init.status) || 200, {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
  });
- const reader = res.getReader();
- function read() {
- reader.read().then(({ done, value }) => {
- if (done) {
- response.end();
- return;
+ }
+
+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
  }
- response.write(value);
- read();
- });
+ }
  }
- read();
  }
-
- // streams/huggingface-stream.ts
- function createParser2(res) {
- const trimStartOfStream = trimStartOfStreamHelper();
- return new ReadableStream({
- async pull(controller) {
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
  var _a, _b;
- const { value, done } = await res.next();
- if (done) {
- controller.close();
- return;
- }
- const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
- if (!text)
- return;
- if (value.generated_text != null && value.generated_text.length > 0) {
- return;
- }
- if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
- return;
- }
- controller.enqueue(text);
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
  }
- });
+ );
  }
- function HuggingFaceStream(res, callbacks) {
- return createParser2(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
@@ -742,7 +613,7 @@ async function readAndProcessLines(reader, controller) {
  }
  controller.close();
  }
- function createParser3(res) {
+ function createParser2(res) {
  var _a;
  const reader = (_a = res.body) == null ? void 0 : _a.getReader();
  return new ReadableStream({
@@ -756,46 +627,57 @@ function createParser3(res) {
  });
  }
  function CohereStream(reader, callbacks) {
- return createParser3(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
  createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
  );
  }
 
- // streams/anthropic-stream.ts
- function parseAnthropicStream() {
- let previous = "";
- return (data) => {
- const json = JSON.parse(data);
- if ("error" in json) {
- throw new Error(`${json.error.type}: ${json.error.message}`);
+ // streams/google-generative-ai-stream.ts
+ async function* streamable2(response) {
+ var _a, _b, _c;
+ for await (const chunk of response.stream) {
+ const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+ if (parts === void 0) {
+ continue;
  }
- if (!("completion" in json)) {
- return;
- }
- const text = json.completion;
- if (!previous || text.length > previous.length && text.startsWith(previous)) {
- const delta = text.slice(previous.length);
- previous = text;
- return delta;
+ const firstPart = parts[0];
+ if (typeof firstPart.text === "string") {
+ yield firstPart.text;
  }
- return text;
- };
- }
- async function* streamable2(stream) {
- for await (const chunk of stream) {
- const text = chunk.completion;
- if (text)
- yield text;
  }
  }
- function AnthropicStream(res, cb) {
- if (Symbol.asyncIterator in res) {
- return readableFromAsyncIterable(streamable2(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
- } else {
- return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
- );
- }
+ function GoogleGenerativeAIStream(response, cb) {
+ return readableFromAsyncIterable(streamable2(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+ }
+
+ // streams/huggingface-stream.ts
+ function createParser3(res) {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ return new ReadableStream({
+ async pull(controller) {
+ var _a, _b;
+ const { value, done } = await res.next();
+ if (done) {
+ controller.close();
+ return;
+ }
+ const text = trimStartOfStream((_b = (_a = value.token) == null ? void 0 : _a.text) != null ? _b : "");
+ if (!text)
+ return;
+ if (value.generated_text != null && value.generated_text.length > 0) {
+ return;
+ }
+ if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
+ return;
+ }
+ controller.enqueue(text);
+ }
+ });
+ }
+ function HuggingFaceStream(res, callbacks) {
+ return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
  }
 
  // streams/langchain-stream.ts
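
A plausible usage sketch for the newly exported GoogleGenerativeAIStream (the @google/generative-ai client, the gemini-pro model name, and the fetch-style route handler are assumptions, not taken from this diff). The helper iterates the .stream property of a generateContentStream result, pulling each chunk's candidates[0].content.parts as streamable2 above shows:

import { GoogleGenerativeAI } from "@google/generative-ai";
import { GoogleGenerativeAIStream, StreamingTextResponse } from "ai";

const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY ?? "");

export async function POST(req: Request) {
  const { prompt } = await req.json();

  // Stream a completion from Gemini; the helper extracts each chunk's text.
  const geminiStream = await genAI
    .getGenerativeModel({ model: "gemini-pro" })
    .generateContentStream(prompt);

  return new StreamingTextResponse(GoogleGenerativeAIStream(geminiStream));
}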
@@ -859,6 +741,277 @@ function LangChainStream(callbacks) {
  };
  }
 
+ // streams/openai-stream.ts
+ function parseOpenAIStream() {
+ const extract = chunkToText();
+ return (data) => extract(JSON.parse(data));
+ }
+ async function* streamable3(stream) {
+ const extract = chunkToText();
+ for await (const chunk of stream) {
+ const text = extract(chunk);
+ if (text)
+ yield text;
+ }
+ }
+ function chunkToText() {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ let isFunctionStreamingIn;
+ return (json) => {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+ if (isChatCompletionChunk(json)) {
+ const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
+ if ((_b = delta.function_call) == null ? void 0 : _b.name) {
+ isFunctionStreamingIn = true;
+ return `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`;
+ } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
+ isFunctionStreamingIn = true;
+ const toolCall = delta.tool_calls[0];
+ if (toolCall.index === 0) {
+ return `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`;
+ } else {
+ return `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`;
+ }
+ } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
+ return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
+ } else if ((_k = (_j = delta.tool_calls) == null ? void 0 : _j[0].function) == null ? void 0 : _k.arguments) {
+ return cleanupArguments((_n = (_m = (_l = delta.tool_calls) == null ? void 0 : _l[0]) == null ? void 0 : _m.function) == null ? void 0 : _n.arguments);
+ } else if (isFunctionStreamingIn && (((_o = json.choices[0]) == null ? void 0 : _o.finish_reason) === "function_call" || ((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "stop")) {
+ isFunctionStreamingIn = false;
+ return '"}}';
+ } else if (isFunctionStreamingIn && ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "tool_calls") {
+ isFunctionStreamingIn = false;
+ return '"}}]}';
+ }
+ }
+ const text = trimStartOfStream(
+ isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
+ );
+ return text;
+ };
+ function cleanupArguments(argumentChunk) {
+ let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+ return `${escapedPartialJson}`;
+ }
+ }
+ var __internal__OpenAIFnMessagesSymbol = Symbol(
+ "internal_openai_fn_messages"
+ );
+ function isChatCompletionChunk(data) {
+ return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
+ }
+ function isCompletion(data) {
+ return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
+ }
+ function OpenAIStream(res, callbacks) {
+ const cb = callbacks;
+ let stream;
+ if (Symbol.asyncIterator in res) {
+ stream = readableFromAsyncIterable(streamable3(res)).pipeThrough(
+ createCallbacksTransformer(
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ )
+ );
+ } else {
+ stream = AIStream(
+ res,
+ parseOpenAIStream(),
+ (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+ ...cb,
+ onFinal: void 0
+ } : {
+ ...cb
+ }
+ );
+ }
+ if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
+ const functionCallTransformer = createFunctionCallTransformer(cb);
+ return stream.pipeThrough(functionCallTransformer);
+ } else {
+ return stream.pipeThrough(
+ createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+ );
+ }
+ }
+ function createFunctionCallTransformer(callbacks) {
+ const textEncoder = new TextEncoder();
+ let isFirstChunk = true;
+ let aggregatedResponse = "";
+ let aggregatedFinalCompletionResponse = "";
+ let isFunctionStreamingIn = false;
+ let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+ const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
+ const decode = createChunkDecoder();
+ return new TransformStream({
+ async transform(chunk, controller) {
+ const message = decode(chunk);
+ aggregatedFinalCompletionResponse += message;
+ const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
+ if (shouldHandleAsFunction) {
+ isFunctionStreamingIn = true;
+ aggregatedResponse += message;
+ isFirstChunk = false;
+ return;
+ }
+ if (!isFunctionStreamingIn) {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+ );
+ return;
+ } else {
+ aggregatedResponse += message;
+ }
+ },
+ async flush(controller) {
+ try {
+ if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
+ isFunctionStreamingIn = false;
+ const payload = JSON.parse(aggregatedResponse);
+ let newFunctionCallMessages = [
+ ...functionCallMessages
+ ];
+ let functionResponse = void 0;
+ if (callbacks.experimental_onFunctionCall) {
+ if (payload.function_call === void 0) {
+ console.warn(
+ "experimental_onFunctionCall should not be defined when using tools"
+ );
+ }
+ const argumentsPayload = JSON.parse(
+ payload.function_call.arguments
+ );
+ functionResponse = await callbacks.experimental_onFunctionCall(
+ {
+ name: payload.function_call.name,
+ arguments: argumentsPayload
+ },
+ (result) => {
+ newFunctionCallMessages = [
+ ...functionCallMessages,
+ {
+ role: "assistant",
+ content: "",
+ function_call: payload.function_call
+ },
+ {
+ role: "function",
+ name: payload.function_call.name,
+ content: JSON.stringify(result)
+ }
+ ];
+ return newFunctionCallMessages;
+ }
+ );
+ }
+ if (callbacks.experimental_onToolCall) {
+ const toolCalls = {
+ tools: []
+ };
+ for (const tool of payload.tool_calls) {
+ toolCalls.tools.push({
+ id: tool.id,
+ type: "function",
+ func: {
+ name: tool.function.name,
+ arguments: tool.function.arguments
+ }
+ });
+ }
+ let responseIndex = 0;
+ try {
+ functionResponse = await callbacks.experimental_onToolCall(
+ toolCalls,
+ (result) => {
+ if (result) {
+ const { tool_call_id, function_name, tool_call_result } = result;
+ newFunctionCallMessages = [
+ ...newFunctionCallMessages,
+ // Only append the assistant message if it's the first response
+ ...responseIndex === 0 ? [
+ {
+ role: "assistant",
+ content: "",
+ tool_calls: payload.tool_calls.map(
+ (tc) => ({
+ id: tc.id,
+ type: "function",
+ function: {
+ name: tc.function.name,
+ // we send the arguments as an object to the user, but as the API expects a string, we need to stringify it
+ arguments: JSON.stringify(
+ tc.function.arguments
+ )
+ }
+ })
+ )
+ }
+ ] : [],
+ // Append the function call result message
+ {
+ role: "tool",
+ tool_call_id,
+ name: function_name,
+ content: JSON.stringify(tool_call_result)
+ }
+ ];
+ responseIndex++;
+ }
+ return newFunctionCallMessages;
+ }
+ );
+ } catch (e) {
+ console.error("Error calling experimental_onToolCall:", e);
+ }
+ }
+ if (!functionResponse) {
+ controller.enqueue(
+ textEncoder.encode(
+ isComplexMode ? formatStreamPart(
+ payload.function_call ? "function_call" : "tool_calls",
+ // parse to prevent double-encoding:
+ JSON.parse(aggregatedResponse)
+ ) : aggregatedResponse
+ )
+ );
+ return;
+ } else if (typeof functionResponse === "string") {
+ controller.enqueue(
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
+ );
+ return;
+ }
+ const filteredCallbacks = {
+ ...callbacks,
+ onStart: void 0
+ };
+ callbacks.onFinal = void 0;
+ const openAIStream = OpenAIStream(functionResponse, {
+ ...filteredCallbacks,
+ [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+ });
+ const reader = openAIStream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ controller.enqueue(value);
+ }
+ }
+ } finally {
+ if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
+ await callbacks.onFinal(aggregatedFinalCompletionResponse);
+ }
+ }
+ }
+ });
+ }
+
  // streams/replicate-stream.ts
  async function ReplicateStream(res, cb, options) {
  var _a;
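
A sketch of how the new experimental_onToolCall callback could be wired up (the OpenAI SDK v4 client, the tool definition, and the route shape are assumptions; the callback name and the { tools: [{ id, type, func }] } payload shape come from the transformer above). Returning another streaming completion from the callback makes the transformer pipe it back through OpenAIStream:

import OpenAI from "openai";
import { OpenAIStream, StreamingTextResponse } from "ai";

const openai = new OpenAI();

export async function POST(req: Request) {
  const { messages } = await req.json();

  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo-1106",
    stream: true,
    messages,
    tools: [
      {
        type: "function",
        function: {
          name: "get_current_weather", // hypothetical tool
          parameters: {
            type: "object",
            properties: { location: { type: "string" } },
            required: ["location"],
          },
        },
      },
    ],
  });

  const stream = OpenAIStream(response, {
    // Invoked after the streamed tool_calls JSON has been fully aggregated.
    experimental_onToolCall: async (toolCallPayload, appendToolCallMessage) => {
      for (const tool of toolCallPayload.tools) {
        if (tool.func.name === "get_current_weather") {
          // appendToolCallMessage records the assistant tool_calls message
          // plus a role:"tool" result message and returns the new history.
          const newMessages = appendToolCallMessage({
            tool_call_id: tool.id,
            function_name: tool.func.name,
            tool_call_result: { temperature: 21, unit: "C" }, // stubbed result
          });
          return openai.chat.completions.create({
            model: "gpt-3.5-turbo-1106",
            stream: true,
            messages: [...messages, ...newMessages],
          });
        }
      }
    },
  });

  return new StreamingTextResponse(stream);
}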
@@ -967,20 +1120,35 @@ async function parseComplexResponse({
  };
  functionCallMessage = prefixMap["function_call"];
  }
+ let toolCallMessage = null;
+ if (type === "tool_calls") {
+ prefixMap["tool_calls"] = {
+ id: generateId(),
+ role: "assistant",
+ content: "",
+ tool_calls: value.tool_calls,
+ createdAt
+ };
+ toolCallMessage = prefixMap["tool_calls"];
+ }
  if (type === "data") {
  prefixMap["data"].push(...value);
  }
  const responseMessage = prefixMap["text"];
- const merged = [functionCallMessage, responseMessage].filter(
- Boolean
- );
+ const merged = [
+ functionCallMessage,
+ toolCallMessage,
+ responseMessage
+ ].filter(Boolean);
  update(merged, [...prefixMap["data"]]);
  }
  onFinish == null ? void 0 : onFinish(prefixMap);
  return {
- messages: [prefixMap.text, prefixMap.function_call].filter(
- Boolean
- ),
+ messages: [
+ prefixMap.text,
+ prefixMap.function_call,
+ prefixMap.tool_calls
+ ].filter(Boolean),
  data: prefixMap.data
  };
  }
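
Derived from the parsing logic above (illustration only, not part of the diff): a tool_calls frame on the wire now materializes an extra assistant message that is merged alongside the text and function_call messages:

// A "tool_calls" frame as it appears in the response stream:
// 7:{"tool_calls":[{"id":"call_0","type":"function","function":{"name":"get_current_weather","arguments":"{}"}}]}
//
// parseComplexResponse turns it into an assistant message passed to update()
// and included in the final messages array:
const toolCallMessage = {
  id: generateId(), // nanoid-based id, as in shared/utils.ts
  role: "assistant",
  content: "",
  tool_calls: [
    { id: "call_0", type: "function", function: { name: "get_current_weather", arguments: "{}" } },
  ],
  createdAt, // timestamp captured when parsing began
};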
@@ -1060,59 +1228,41 @@ var experimental_StreamingReactResponse = class {
  }
  };
 
- // streams/assistant-response.ts
- function experimental_AssistantResponse({ threadId, messageId }, process2) {
- const stream = new ReadableStream({
- async start(controller) {
- var _a;
- const textEncoder = new TextEncoder();
- const sendMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("assistant_message", message))
- );
- };
- const sendDataMessage = (message) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("data_message", message))
- );
- };
- const sendError = (errorMessage) => {
- controller.enqueue(
- textEncoder.encode(formatStreamPart("error", errorMessage))
- );
- };
- controller.enqueue(
- textEncoder.encode(
- formatStreamPart("assistant_control_data", {
- threadId,
- messageId
- })
- )
- );
- try {
- await process2({
- threadId,
- messageId,
- sendMessage,
- sendDataMessage
- });
- } catch (error) {
- sendError((_a = error.message) != null ? _a : `${error}`);
- } finally {
- controller.close();
- }
- },
- pull(controller) {
- },
- cancel() {
- }
- });
- return new Response(stream, {
- status: 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8"
+ // streams/streaming-text-response.ts
+ var StreamingTextResponse = class extends Response {
+ constructor(res, init, data) {
+ let processedStream = res;
+ if (data) {
+ processedStream = res.pipeThrough(data.stream);
  }
+ super(processedStream, {
+ ...init,
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ [COMPLEX_HEADER]: data ? "true" : "false",
+ ...init == null ? void 0 : init.headers
+ }
+ });
+ }
+ };
+ function streamToResponse(res, response, init) {
+ response.writeHead((init == null ? void 0 : init.status) || 200, {
+ "Content-Type": "text/plain; charset=utf-8",
+ ...init == null ? void 0 : init.headers
  });
+ const reader = res.getReader();
+ function read() {
+ reader.read().then(({ done, value }) => {
+ if (done) {
+ response.end();
+ return;
+ }
+ response.write(value);
+ read();
+ });
+ }
+ read();
  }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
@@ -1124,6 +1274,7 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
  AnthropicStream,
  COMPLEX_HEADER,
  CohereStream,
+ GoogleGenerativeAIStream,
  HuggingFaceStream,
  LangChainStream,
  OpenAIStream,