ai 0.0.0-85f9a635-20240518005312

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. package/LICENSE +13 -0
  2. package/README.md +37 -0
  3. package/dist/index.d.mts +2287 -0
  4. package/dist/index.d.ts +2287 -0
  5. package/dist/index.js +3531 -0
  6. package/dist/index.js.map +1 -0
  7. package/dist/index.mjs +3454 -0
  8. package/dist/index.mjs.map +1 -0
  9. package/package.json +178 -0
  10. package/prompts/dist/index.d.mts +324 -0
  11. package/prompts/dist/index.d.ts +324 -0
  12. package/prompts/dist/index.js +178 -0
  13. package/prompts/dist/index.js.map +1 -0
  14. package/prompts/dist/index.mjs +146 -0
  15. package/prompts/dist/index.mjs.map +1 -0
  16. package/react/dist/index.d.mts +565 -0
  17. package/react/dist/index.d.ts +582 -0
  18. package/react/dist/index.js +1430 -0
  19. package/react/dist/index.js.map +1 -0
  20. package/react/dist/index.mjs +1391 -0
  21. package/react/dist/index.mjs.map +1 -0
  22. package/react/dist/index.server.d.mts +17 -0
  23. package/react/dist/index.server.d.ts +17 -0
  24. package/react/dist/index.server.js +50 -0
  25. package/react/dist/index.server.js.map +1 -0
  26. package/react/dist/index.server.mjs +23 -0
  27. package/react/dist/index.server.mjs.map +1 -0
  28. package/rsc/dist/index.d.ts +580 -0
  29. package/rsc/dist/index.mjs +18 -0
  30. package/rsc/dist/rsc-client.d.mts +1 -0
  31. package/rsc/dist/rsc-client.mjs +18 -0
  32. package/rsc/dist/rsc-client.mjs.map +1 -0
  33. package/rsc/dist/rsc-server.d.mts +516 -0
  34. package/rsc/dist/rsc-server.mjs +1900 -0
  35. package/rsc/dist/rsc-server.mjs.map +1 -0
  36. package/rsc/dist/rsc-shared.d.mts +94 -0
  37. package/rsc/dist/rsc-shared.mjs +346 -0
  38. package/rsc/dist/rsc-shared.mjs.map +1 -0
  39. package/solid/dist/index.d.mts +408 -0
  40. package/solid/dist/index.d.ts +408 -0
  41. package/solid/dist/index.js +1072 -0
  42. package/solid/dist/index.js.map +1 -0
  43. package/solid/dist/index.mjs +1044 -0
  44. package/solid/dist/index.mjs.map +1 -0
  45. package/svelte/dist/index.d.mts +484 -0
  46. package/svelte/dist/index.d.ts +484 -0
  47. package/svelte/dist/index.js +1778 -0
  48. package/svelte/dist/index.js.map +1 -0
  49. package/svelte/dist/index.mjs +1749 -0
  50. package/svelte/dist/index.mjs.map +1 -0
  51. package/vue/dist/index.d.mts +402 -0
  52. package/vue/dist/index.d.ts +402 -0
  53. package/vue/dist/index.js +1072 -0
  54. package/vue/dist/index.js.map +1 -0
  55. package/vue/dist/index.mjs +1034 -0
  56. package/vue/dist/index.mjs.map +1 -0
package/dist/index.mjs ADDED
@@ -0,0 +1,3454 @@
1
// Bundler-generated helpers for defining module exports.
var __defProp = Object.defineProperty;
var __export = (target, all) => {
  // Install lazy, enumerable getters so each export stays a live binding.
  for (var name in all) {
    __defProp(target, name, { enumerable: true, get: all[name] });
  }
};
6
+
7
+ // core/util/retry-with-exponential-backoff.ts
8
+ import { APICallError, RetryError } from "@ai-sdk/provider";
9
+ import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
10
+
11
// core/util/delay.ts
/**
 * Resolves after waiting the given number of milliseconds.
 *
 * @param delayInMs wait time in milliseconds.
 * @returns a promise that resolves (with undefined) after the wait.
 */
async function delay(delayInMs) {
  await new Promise((resolve) => {
    setTimeout(resolve, delayInMs);
  });
}
15
+
16
// core/util/retry-with-exponential-backoff.ts
/**
 * Creates a retry executor that runs an async function with exponential
 * backoff (delegates to _retryWithExponentialBackoff).
 *
 * @param maxRetries maximum number of retries (default 2).
 * @param initialDelayInMs delay before the first retry (default 2000ms).
 * @param backoffFactor multiplier applied to the delay after each retry.
 * @returns an async function that takes the operation `f` and runs it with retries.
 */
var retryWithExponentialBackoff = ({
  maxRetries = 2,
  initialDelayInMs = 2e3,
  backoffFactor = 2
} = {}) => {
  return async (f) => {
    return _retryWithExponentialBackoff(f, {
      maxRetries,
      delayInMs: initialDelayInMs,
      backoffFactor
    });
  };
};
26
/**
 * Runs `f`, retrying on retryable API call errors with exponentially
 * increasing delays. Recurses, accumulating all seen errors in `errors`.
 *
 * Throws:
 * - the original error for aborts, maxRetries === 0, or a first-attempt
 *   non-retryable failure;
 * - RetryError("maxRetriesExceeded") once attempts exceed maxRetries;
 * - RetryError("errorNotRetryable") for later non-retryable failures.
 */
async function _retryWithExponentialBackoff(f, {
  maxRetries,
  delayInMs,
  backoffFactor
}, errors = []) {
  try {
    return await f();
  } catch (error) {
    // Abort is a deliberate cancellation — never retry, never wrap.
    if (isAbortError(error)) {
      throw error;
    }
    // Retries disabled: surface the raw error.
    if (maxRetries === 0) {
      throw error;
    }
    const errorMessage = getErrorMessage(error);
    const newErrors = [...errors, error];
    const tryNumber = newErrors.length;
    if (tryNumber > maxRetries) {
      throw new RetryError({
        message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
        reason: "maxRetriesExceeded",
        errors: newErrors
      });
    }
    // Only retryable API call errors trigger a backoff-and-retry.
    // (tryNumber <= maxRetries is always true here — the case above threw.)
    if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
      await delay(delayInMs);
      return _retryWithExponentialBackoff(
        f,
        { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
        newErrors
      );
    }
    // Non-retryable on the very first attempt: rethrow unwrapped.
    if (tryNumber === 1) {
      throw error;
    }
    throw new RetryError({
      message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
      reason: "errorNotRetryable",
      errors: newErrors
    });
  }
}
68
+
69
// core/embed/embed.ts
/**
 * Embeds a single value with an embedding model, retrying with
 * exponential backoff on retryable failures.
 *
 * @param model embedding model (must provide doEmbed).
 * @param value the single value to embed.
 * @param maxRetries optional retry limit forwarded to the retry helper.
 * @param abortSignal optional AbortSignal forwarded to the model call.
 * @returns an EmbedResult with the value, its embedding, and the raw response.
 */
async function embed({
  model,
  value,
  maxRetries,
  abortSignal
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  // The model API is batch-oriented, so wrap the single value in an array
  // and take the first embedding back out.
  const { embeddings, rawResponse } = await retry(() =>
    model.doEmbed({ values: [value], abortSignal })
  );
  return new EmbedResult({
    value,
    embedding: embeddings[0],
    rawResponse
  });
}
89
/**
 * Result container for embed(): the input value, its embedding vector,
 * and the provider's raw response.
 */
var EmbedResult = class {
  constructor({ value, embedding, rawResponse }) {
    this.value = value;
    this.embedding = embedding;
    this.rawResponse = rawResponse;
  }
};
96
+
97
// core/util/split-array.ts
/**
 * Splits an array into consecutive chunks of at most `chunkSize` elements.
 * The final chunk may be shorter.
 *
 * @param array array to split.
 * @param chunkSize maximum chunk length; must be > 0.
 * @returns array of chunks (empty array for empty input).
 * @throws Error when chunkSize <= 0.
 */
function splitArray(array, chunkSize) {
  if (chunkSize <= 0) {
    throw new Error("chunkSize must be greater than 0");
  }
  const chunkCount = Math.ceil(array.length / chunkSize);
  return Array.from({ length: chunkCount }, (_, chunkIndex) =>
    array.slice(chunkIndex * chunkSize, (chunkIndex + 1) * chunkSize)
  );
}
108
+
109
// core/embed/embed-many.ts
/**
 * Embeds many values with an embedding model, honoring the model's
 * per-call batch limit (maxEmbeddingsPerCall) by chunking when needed.
 * Each model call is retried with exponential backoff.
 *
 * @param model embedding model (must provide doEmbed, maxEmbeddingsPerCall).
 * @param values the values to embed.
 * @param maxRetries optional retry limit forwarded to the retry helper.
 * @param abortSignal optional AbortSignal forwarded to each model call.
 * @returns an EmbedManyResult pairing the values with their embeddings.
 */
async function embedMany({
  model,
  values,
  maxRetries,
  abortSignal
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
  // No batch limit: embed everything in a single call.
  if (maxEmbeddingsPerCall == null) {
    const { embeddings: allEmbeddings } = await retry(() =>
      model.doEmbed({ values, abortSignal })
    );
    return new EmbedManyResult({ values, embeddings: allEmbeddings });
  }
  // Batch limit present: chunk the values and embed the chunks
  // sequentially, concatenating the embeddings in order.
  const embeddings = [];
  for (const chunk of splitArray(values, maxEmbeddingsPerCall)) {
    const modelResponse = await retry(() =>
      model.doEmbed({ values: chunk, abortSignal })
    );
    embeddings.push(...modelResponse.embeddings);
  }
  return new EmbedManyResult({ values, embeddings });
}
137
/**
 * Result container for embedMany(): the input values and their
 * embeddings in matching order.
 */
var EmbedManyResult = class {
  constructor({ values, embeddings }) {
    this.values = values;
    this.embeddings = embeddings;
  }
};
143
+
144
+ // core/generate-object/generate-object.ts
145
+ import { NoObjectGeneratedError } from "@ai-sdk/provider";
146
+ import { safeParseJSON } from "@ai-sdk/provider-utils";
147
+
148
// core/generate-text/token-usage.ts
/**
 * Normalizes raw model usage into prompt/completion/total token counts.
 *
 * @param usage object with promptTokens and completionTokens.
 * @returns usage object with totalTokens computed as their sum.
 */
function calculateTokenUsage(usage) {
  const { promptTokens, completionTokens } = usage;
  return {
    promptTokens,
    completionTokens,
    totalTokens: promptTokens + completionTokens
  };
}
156
+
157
// core/util/detect-image-mimetype.ts
// Magic-byte prefixes for the image formats we recognize.
var mimeTypeSignatures = [
  { mimeType: "image/gif", bytes: [71, 73, 70] },
  { mimeType: "image/png", bytes: [137, 80, 78, 71] },
  { mimeType: "image/jpeg", bytes: [255, 216] },
  { mimeType: "image/webp", bytes: [82, 73, 70, 70] }
];
/**
 * Detects an image's mime type from its leading magic bytes.
 *
 * @param image byte array (indexable, with length) holding the image data.
 * @returns the matching mime type, or undefined when no signature matches.
 */
function detectImageMimeType(image) {
  const match = mimeTypeSignatures.find(
    ({ bytes }) =>
      image.length >= bytes.length &&
      bytes.every((byte, index) => image[index] === byte)
  );
  return match === undefined ? void 0 : match.mimeType;
}
172
+
173
+ // core/prompt/data-content.ts
174
+ import { InvalidDataContentError } from "@ai-sdk/provider";
175
+ import {
176
+ convertBase64ToUint8Array,
177
+ convertUint8ArrayToBase64
178
+ } from "@ai-sdk/provider-utils";
179
/**
 * Converts data content (base64 string, ArrayBuffer, or Uint8Array) to a
 * base64-encoded string. Strings are assumed to already be base64 and are
 * returned unchanged.
 */
function convertDataContentToBase64String(content) {
  if (typeof content === "string") {
    return content;
  }
  // Normalize ArrayBuffer to a byte view, then encode.
  const bytes = content instanceof ArrayBuffer ? new Uint8Array(content) : content;
  return convertUint8ArrayToBase64(bytes);
}
188
/**
 * Converts data content (base64 string, ArrayBuffer, or Uint8Array) to a
 * Uint8Array. Uint8Array input is returned as-is (not copied).
 *
 * @throws InvalidDataContentError when a string is not valid base64, or
 * when the content is none of the supported types.
 */
function convertDataContentToUint8Array(content) {
  // Strings are treated as base64-encoded bytes.
  if (typeof content === "string") {
    try {
      return convertBase64ToUint8Array(content);
    } catch (error) {
      throw new InvalidDataContentError({
        message: "Invalid data content. Content string is not a base64-encoded image.",
        content,
        cause: error
      });
    }
  }
  if (content instanceof Uint8Array) {
    return content;
  }
  if (content instanceof ArrayBuffer) {
    return new Uint8Array(content);
  }
  throw new InvalidDataContentError({ content });
}
208
+
209
// core/prompt/convert-to-language-model-prompt.ts
/**
 * Converts a validated prompt (output of getValidatedPrompt: either
 * { type: "prompt", prompt } or { type: "messages", messages }, plus an
 * optional system string) into the flat message array consumed by the
 * language model interface.
 */
function convertToLanguageModelPrompt(prompt) {
  const languageModelMessages = [];
  // A system prompt always becomes the leading system message.
  if (prompt.system != null) {
    languageModelMessages.push({ role: "system", content: prompt.system });
  }
  switch (prompt.type) {
    case "prompt": {
      // Plain text prompt becomes a single user message with one text part.
      languageModelMessages.push({
        role: "user",
        content: [{ type: "text", text: prompt.prompt }]
      });
      break;
    }
    case "messages": {
      languageModelMessages.push(
        ...prompt.messages.map((message) => {
          switch (message.role) {
            case "system": {
              return { role: "system", content: message.content };
            }
            case "user": {
              // String content is normalized to a one-element text-part array.
              if (typeof message.content === "string") {
                return {
                  role: "user",
                  content: [{ type: "text", text: message.content }]
                };
              }
              // Multi-part content: pass text parts through; normalize
              // image parts (URL stays as-is, other data becomes bytes).
              return {
                role: "user",
                content: message.content.map(
                  (part) => {
                    var _a;
                    switch (part.type) {
                      case "text": {
                        return part;
                      }
                      case "image": {
                        if (part.image instanceof URL) {
                          return {
                            type: "image",
                            image: part.image,
                            mimeType: part.mimeType
                          };
                        }
                        const imageUint8 = convertDataContentToUint8Array(
                          part.image
                        );
                        return {
                          type: "image",
                          image: imageUint8,
                          // Fall back to magic-byte sniffing when the
                          // caller did not supply a mime type.
                          mimeType: (_a = part.mimeType) != null ? _a : detectImageMimeType(imageUint8)
                        };
                      }
                      // NOTE(review): no default case — an unknown part
                      // type maps to undefined here; confirm upstream
                      // validation makes that unreachable.
                    }
                  }
                )
              };
            }
            case "assistant": {
              // String content is normalized the same way as for users.
              if (typeof message.content === "string") {
                return {
                  role: "assistant",
                  content: [{ type: "text", text: message.content }]
                };
              }
              return { role: "assistant", content: message.content };
            }
            case "tool": {
              // Tool messages are forwarded unchanged.
              return message;
            }
          }
        })
      );
      break;
    }
    default: {
      const _exhaustiveCheck = prompt;
      throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
    }
  }
  return languageModelMessages;
}
292
+
293
+ // core/prompt/get-validated-prompt.ts
294
+ import { InvalidPromptError } from "@ai-sdk/provider";
295
/**
 * Validates that exactly one of `prompt` / `messages` is set and tags the
 * input with a discriminating `type` for downstream conversion.
 *
 * @returns { type: "prompt", prompt, messages: undefined, system } or
 *          { type: "messages", prompt: undefined, messages, system }.
 * @throws InvalidPromptError when neither or both are defined.
 */
function getValidatedPrompt(prompt) {
  const hasPrompt = prompt.prompt != null;
  const hasMessages = prompt.messages != null;
  if (!hasPrompt && !hasMessages) {
    throw new InvalidPromptError({
      prompt,
      message: "prompt or messages must be defined"
    });
  }
  if (hasPrompt && hasMessages) {
    throw new InvalidPromptError({
      prompt,
      message: "prompt and messages cannot be defined at the same time"
    });
  }
  if (hasPrompt) {
    return {
      type: "prompt",
      prompt: prompt.prompt,
      messages: void 0,
      system: prompt.system
    };
  }
  // messages is the only remaining possibility after the checks above.
  return {
    type: "messages",
    prompt: void 0,
    messages: prompt.messages,
    system: prompt.system
  };
}
321
+
322
+ // core/prompt/prepare-call-settings.ts
323
+ import { InvalidArgumentError } from "@ai-sdk/provider";
324
/**
 * Validates user-supplied call settings and applies defaults
 * (temperature 0, maxRetries 2). All parameters are optional; null and
 * undefined skip validation.
 *
 * @throws InvalidArgumentError when a setting has the wrong type or is
 * out of range (maxTokens >= 1, maxRetries >= 0, seed integer).
 */
function prepareCallSettings({
  maxTokens,
  temperature,
  topP,
  presencePenalty,
  frequencyPenalty,
  seed,
  maxRetries
}) {
  // Shared validators; messages match the historical per-parameter text.
  const requireNumber = (parameter, value) => {
    if (value != null && typeof value !== "number") {
      throw new InvalidArgumentError({
        parameter,
        value,
        message: `${parameter} must be a number`
      });
    }
  };
  const requireInteger = (parameter, value) => {
    if (value != null && !Number.isInteger(value)) {
      throw new InvalidArgumentError({
        parameter,
        value,
        message: `${parameter} must be an integer`
      });
    }
  };
  requireInteger("maxTokens", maxTokens);
  if (maxTokens != null && maxTokens < 1) {
    throw new InvalidArgumentError({
      parameter: "maxTokens",
      value: maxTokens,
      message: "maxTokens must be >= 1"
    });
  }
  requireNumber("temperature", temperature);
  requireNumber("topP", topP);
  requireNumber("presencePenalty", presencePenalty);
  requireNumber("frequencyPenalty", frequencyPenalty);
  requireInteger("seed", seed);
  requireInteger("maxRetries", maxRetries);
  if (maxRetries != null && maxRetries < 0) {
    throw new InvalidArgumentError({
      parameter: "maxRetries",
      value: maxRetries,
      message: "maxRetries must be >= 0"
    });
  }
  return {
    maxTokens,
    temperature: temperature != null ? temperature : 0,
    topP,
    presencePenalty,
    frequencyPenalty,
    seed,
    maxRetries: maxRetries != null ? maxRetries : 2
  };
}
420
+
421
+ // core/util/convert-zod-to-json-schema.ts
422
+ import zodToJsonSchema from "zod-to-json-schema";
423
// Thin wrapper so the rest of this module does not depend on the
// zod-to-json-schema package directly.
function convertZodToJSONSchema(zodSchema) {
  return zodToJsonSchema(zodSchema);
}
426
+
427
// core/generate-object/inject-json-schema-into-system.ts
var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
/**
 * Appends JSON-schema instructions to an (optional) system prompt:
 * [system, blank line (only when system is set), prefix, schema JSON,
 * suffix], joined with newlines.
 *
 * @param system optional existing system prompt.
 * @param schema JSON schema object to embed (stringified).
 * @param schemaPrefix line placed before the schema (default "JSON schema:").
 * @param schemaSuffix line placed after the schema.
 * @returns the combined system prompt string.
 */
function injectJsonSchemaIntoSystem({
  system,
  schema,
  schemaPrefix = DEFAULT_SCHEMA_PREFIX,
  schemaSuffix = DEFAULT_SCHEMA_SUFFIX
}) {
  const lines = [
    system,
    // empty string inserts a blank separator line after the system prompt
    system != null ? "" : null,
    schemaPrefix,
    JSON.stringify(schema),
    schemaSuffix
  ];
  return lines.filter((line) => line != null).join("\n");
}
445
+
446
// core/generate-object/generate-object.ts
/**
 * Generates a JSON object that conforms to a Zod schema, using one of
 * three strategies: "json" (schema injected into the system prompt),
 * "grammar" (provider-enforced schema), or "tool" (forced tool call whose
 * arguments are the object). "auto"/unset falls back to the model's
 * defaultObjectGenerationMode. The model output is validated against the
 * schema before being returned.
 *
 * @throws NoObjectGeneratedError when the model produced no usable text /
 * tool-call arguments; schema validation errors when parsing fails.
 */
async function generateObject({
  model,
  schema,
  mode,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  var _a, _b;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const jsonSchema = convertZodToJSONSchema(schema);
  // Resolve "auto" to the model's preferred object-generation mode.
  if (mode === "auto" || mode == null) {
    mode = model.defaultObjectGenerationMode;
  }
  let result;
  let finishReason;
  let usage;
  let warnings;
  let rawResponse;
  let logprobs;
  switch (mode) {
    case "json": {
      // Schema is conveyed via the system prompt; the model replies with text.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      const generateResult = await retry(() => {
        return model.doGenerate({
          mode: { type: "object-json" },
          ...prepareCallSettings(settings),
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        });
      });
      if (generateResult.text === void 0) {
        throw new NoObjectGeneratedError();
      }
      result = generateResult.text;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      rawResponse = generateResult.rawResponse;
      logprobs = generateResult.logprobs;
      break;
    }
    case "grammar": {
      // Provider enforces the schema as a grammar; schema also injected
      // into the system prompt.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      const generateResult = await retry(
        () => model.doGenerate({
          mode: { type: "object-grammar", schema: jsonSchema },
          // NOTE(review): spreads raw settings, unlike the "json" case
          // which runs prepareCallSettings(settings) — confirm whether
          // this asymmetry (no validation/defaults here) is intended.
          ...settings,
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        })
      );
      if (generateResult.text === void 0) {
        throw new NoObjectGeneratedError();
      }
      result = generateResult.text;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      rawResponse = generateResult.rawResponse;
      logprobs = generateResult.logprobs;
      break;
    }
    case "tool": {
      // Object is produced as the arguments of a forced "json" tool call.
      const validatedPrompt = getValidatedPrompt({
        system,
        prompt,
        messages
      });
      const generateResult = await retry(
        () => model.doGenerate({
          mode: {
            type: "object-tool",
            tool: {
              type: "function",
              name: "json",
              description: "Respond with a JSON object.",
              parameters: jsonSchema
            }
          },
          // NOTE(review): raw settings spread here too (see "grammar").
          ...settings,
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        })
      );
      // First tool call's arguments carry the JSON text (optional-chain
      // written out by the transpiler).
      const functionArgs = (_b = (_a = generateResult.toolCalls) == null ? void 0 : _a[0]) == null ? void 0 : _b.args;
      if (functionArgs === void 0) {
        throw new NoObjectGeneratedError();
      }
      result = functionArgs;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      rawResponse = generateResult.rawResponse;
      logprobs = generateResult.logprobs;
      break;
    }
    case void 0: {
      throw new Error("Model does not have a default object generation mode.");
    }
    default: {
      const _exhaustiveCheck = mode;
      throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
    }
  }
  // Validate the raw JSON text against the Zod schema.
  const parseResult = safeParseJSON({ text: result, schema });
  if (!parseResult.success) {
    throw parseResult.error;
  }
  return new GenerateObjectResult({
    object: parseResult.value,
    finishReason,
    usage: calculateTokenUsage(usage),
    warnings,
    rawResponse,
    logprobs
  });
}
579
/**
 * Result container for generateObject(): the validated object plus
 * finish reason, token usage, warnings, raw response, and logprobs.
 */
var GenerateObjectResult = class {
  constructor({ object, finishReason, usage, warnings, rawResponse, logprobs }) {
    this.object = object;
    this.finishReason = finishReason;
    this.usage = usage;
    this.warnings = warnings;
    this.rawResponse = rawResponse;
    this.logprobs = logprobs;
  }
};
589
// NOTE(review): alias under the experimental_ prefix; presumably retained
// for backwards compatibility with earlier releases — confirm export list.
var experimental_generateObject = generateObject;
590
+
591
// core/util/async-iterable-stream.ts
/**
 * Pipes `source` through a TransformStream built from `transformer` and
 * makes the resulting stream consumable with `for await...of` by
 * attaching a Symbol.asyncIterator implementation.
 *
 * @param source ReadableStream to transform.
 * @param transformer transformer object (transform/flush callbacks) for
 *   the TransformStream constructor.
 * @returns the transformed ReadableStream, augmented to be AsyncIterable.
 */
function createAsyncIterableStream(source, transformer) {
  const transformedStream = source.pipeThrough(
    new TransformStream(transformer)
  );
  transformedStream[Symbol.asyncIterator] = () => {
    const reader = transformedStream.getReader();
    return {
      async next() {
        const { done, value } = await reader.read();
        return done ? { done: true, value: void 0 } : { done: false, value };
      },
      // Fix: without return(), breaking out of a for-await loop left the
      // reader lock held forever and never cancelled the stream. The
      // for-await protocol calls return() on early exit (break/throw).
      async return(value) {
        await reader.cancel();
        reader.releaseLock();
        return { done: true, value };
      }
    };
  };
  return transformedStream;
}
607
+
608
// core/util/is-deep-equal-data.ts
/**
 * Structural deep equality for JSON-like data, plus Dates (compared by
 * timestamp). Values of different constructors are never equal; plain
 * objects are compared by own enumerable keys; arrays element-wise.
 */
function isDeepEqualData(obj1, obj2) {
  if (obj1 === obj2) return true;
  if (obj1 == null || obj2 == null) return false;
  // Both primitives (and not identical): compare directly.
  if (typeof obj1 !== "object" && typeof obj2 !== "object") return obj1 === obj2;
  if (obj1.constructor !== obj2.constructor) return false;
  if (obj1 instanceof Date && obj2 instanceof Date) {
    return obj1.getTime() === obj2.getTime();
  }
  if (Array.isArray(obj1)) {
    if (obj1.length !== obj2.length) return false;
    for (let index = 0; index < obj1.length; index++) {
      if (!isDeepEqualData(obj1[index], obj2[index])) return false;
    }
    return true;
  }
  const keys1 = Object.keys(obj1);
  const keys2 = Object.keys(obj2);
  return (
    keys1.length === keys2.length &&
    keys1.every(
      (key) => keys2.includes(key) && isDeepEqualData(obj1[key], obj2[key])
    )
  );
}
642
+
643
+ // core/util/parse-partial-json.ts
644
+ import SecureJSON from "secure-json-parse";
645
+
646
// core/util/fix-json.ts
/**
 * Repairs a truncated JSON fragment (e.g. from a streaming model) into
 * parseable JSON. Runs a single-pass pushdown state machine over the
 * input, tracking `lastValidIndex` — the last character that can safely
 * end the output — then truncates there and appends whatever closers
 * (quotes, braces, brackets, literal completions) the open states on the
 * stack still require. Trailing partial values that cannot be completed
 * (e.g. a lone "-" or "fa…" that diverges from "false") are dropped.
 */
function fixJson(input) {
  // Stack of parser states; top of stack is the current state.
  const stack = ["ROOT"];
  // Index of the last character that a valid JSON prefix can end on.
  let lastValidIndex = -1;
  // Start index of the literal (true/false/null) currently being read.
  let literalStart = null;
  // Dispatch on the first character of a JSON value; replaces the current
  // state with `swapState` (the state to resume after the value) and
  // pushes the state for the value itself.
  function processValueStart(char, i, swapState) {
    {
      switch (char) {
        case '"': {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_STRING");
          break;
        }
        case "f":
        case "t":
        case "n": {
          // Start of false/true/null.
          lastValidIndex = i;
          literalStart = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_LITERAL");
          break;
        }
        case "-": {
          // A bare minus sign is not yet a valid number, so
          // lastValidIndex is intentionally not advanced.
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_NUMBER");
          break;
        }
        case "0":
        case "1":
        case "2":
        case "3":
        case "4":
        case "5":
        case "6":
        case "7":
        case "8":
        case "9": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_NUMBER");
          break;
        }
        case "{": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_OBJECT_START");
          break;
        }
        case "[": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_ARRAY_START");
          break;
        }
      }
    }
  }
  // Handles "," or "}" after a completed object member value.
  function processAfterObjectValue(char, i) {
    switch (char) {
      case ",": {
        stack.pop();
        stack.push("INSIDE_OBJECT_AFTER_COMMA");
        break;
      }
      case "}": {
        lastValidIndex = i;
        stack.pop();
        break;
      }
    }
  }
  // Handles "," or "]" after a completed array element.
  function processAfterArrayValue(char, i) {
    switch (char) {
      case ",": {
        stack.pop();
        stack.push("INSIDE_ARRAY_AFTER_COMMA");
        break;
      }
      case "]": {
        lastValidIndex = i;
        stack.pop();
        break;
      }
    }
  }
  // Main scan: one state transition per input character.
  for (let i = 0; i < input.length; i++) {
    const char = input[i];
    const currentState = stack[stack.length - 1];
    switch (currentState) {
      case "ROOT":
        processValueStart(char, i, "FINISH");
        break;
      case "INSIDE_OBJECT_START": {
        // Right after "{": expect a key or an immediate "}".
        switch (char) {
          case '"': {
            stack.pop();
            stack.push("INSIDE_OBJECT_KEY");
            break;
          }
          case "}": {
            lastValidIndex = i;
            stack.pop();
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_AFTER_COMMA": {
        // After "," inside an object: only a new key may follow.
        switch (char) {
          case '"': {
            stack.pop();
            stack.push("INSIDE_OBJECT_KEY");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_KEY": {
        // Scanning a key string until its closing quote.
        switch (char) {
          case '"': {
            stack.pop();
            stack.push("INSIDE_OBJECT_AFTER_KEY");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_AFTER_KEY": {
        // Between key and value: expect ":".
        switch (char) {
          case ":": {
            stack.pop();
            stack.push("INSIDE_OBJECT_BEFORE_VALUE");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_BEFORE_VALUE": {
        processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
        break;
      }
      case "INSIDE_OBJECT_AFTER_VALUE": {
        processAfterObjectValue(char, i);
        break;
      }
      case "INSIDE_STRING": {
        switch (char) {
          case '"': {
            stack.pop();
            lastValidIndex = i;
            break;
          }
          case "\\": {
            // Next character is escaped; a string cut mid-escape must
            // not end on the backslash.
            stack.push("INSIDE_STRING_ESCAPE");
            break;
          }
          default: {
            lastValidIndex = i;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_START": {
        // Right after "[": either "]" or the first element.
        switch (char) {
          case "]": {
            lastValidIndex = i;
            stack.pop();
            break;
          }
          default: {
            lastValidIndex = i;
            processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
            break;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_AFTER_VALUE": {
        switch (char) {
          case ",": {
            stack.pop();
            stack.push("INSIDE_ARRAY_AFTER_COMMA");
            break;
          }
          case "]": {
            lastValidIndex = i;
            stack.pop();
            break;
          }
          default: {
            lastValidIndex = i;
            break;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_AFTER_COMMA": {
        processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
        break;
      }
      case "INSIDE_STRING_ESCAPE": {
        // Consume the escaped character and fall back to INSIDE_STRING.
        stack.pop();
        lastValidIndex = i;
        break;
      }
      case "INSIDE_NUMBER": {
        switch (char) {
          case "0":
          case "1":
          case "2":
          case "3":
          case "4":
          case "5":
          case "6":
          case "7":
          case "8":
          case "9": {
            lastValidIndex = i;
            break;
          }
          case "e":
          case "E":
          case "-":
          case ".": {
            // Part of a number but not a valid place to end one
            // (e.g. "1e" / "1." are not parseable), so do not advance
            // lastValidIndex.
            break;
          }
          case ",": {
            // Number ended; re-dispatch the comma in the parent state.
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
              processAfterArrayValue(char, i);
            }
            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
              processAfterObjectValue(char, i);
            }
            break;
          }
          case "}": {
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
              processAfterObjectValue(char, i);
            }
            break;
          }
          case "]": {
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
              processAfterArrayValue(char, i);
            }
            break;
          }
          default: {
            stack.pop();
            break;
          }
        }
        break;
      }
      case "INSIDE_LITERAL": {
        const partialLiteral = input.substring(literalStart, i + 1);
        // Leave the literal state as soon as the text can no longer be a
        // prefix of true/false/null, re-dispatching the current char.
        if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
          stack.pop();
          if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
            processAfterObjectValue(char, i);
          } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
            processAfterArrayValue(char, i);
          }
        } else {
          lastValidIndex = i;
        }
        break;
      }
    }
  }
  // Truncate to the last safely-parseable character, then close every
  // structure still open on the stack (outermost last).
  let result = input.slice(0, lastValidIndex + 1);
  for (let i = stack.length - 1; i >= 0; i--) {
    const state = stack[i];
    switch (state) {
      case "INSIDE_STRING": {
        result += '"';
        break;
      }
      case "INSIDE_OBJECT_KEY":
      case "INSIDE_OBJECT_AFTER_KEY":
      case "INSIDE_OBJECT_AFTER_COMMA":
      case "INSIDE_OBJECT_START":
      case "INSIDE_OBJECT_BEFORE_VALUE":
      case "INSIDE_OBJECT_AFTER_VALUE": {
        result += "}";
        break;
      }
      case "INSIDE_ARRAY_START":
      case "INSIDE_ARRAY_AFTER_COMMA":
      case "INSIDE_ARRAY_AFTER_VALUE": {
        result += "]";
        break;
      }
      case "INSIDE_LITERAL": {
        // Complete an unambiguous partial literal ("tr" -> "true").
        const partialLiteral = input.substring(literalStart, input.length);
        if ("true".startsWith(partialLiteral)) {
          result += "true".slice(partialLiteral.length);
        } else if ("false".startsWith(partialLiteral)) {
          result += "false".slice(partialLiteral.length);
        } else if ("null".startsWith(partialLiteral)) {
          result += "null".slice(partialLiteral.length);
        }
      }
    }
  }
  return result;
}
963
+
964
// core/util/parse-partial-json.ts
/**
 * Best-effort parsing of (possibly truncated) streamed JSON text.
 * Tries the text as-is first; on failure, repairs it with fixJson and
 * tries once more. Returns undefined for null/undefined input or when
 * both attempts fail.
 */
function parsePartialJson(jsonText) {
  if (jsonText == null) {
    return void 0;
  }
  try {
    // Fast path: the text is already complete, valid JSON.
    return SecureJSON.parse(jsonText);
  } catch (ignored) {
    try {
      // Slow path: close off the truncated JSON and retry.
      return SecureJSON.parse(fixJson(jsonText));
    } catch (ignored2) {
      return void 0;
    }
  }
}
980
+
981
// core/generate-object/stream-object.ts
/**
 * Streams a JSON object matching a Zod schema. Mirrors generateObject's
 * three strategies ("json", "grammar", "tool"); "auto"/unset resolves to
 * the model's defaultObjectGenerationMode. For each mode it builds the
 * model call options plus a transformer that reduces the provider stream
 * to JSON text deltas (text-delta / tool-call argument deltas) while
 * passing finish/error chunks through.
 *
 * @returns a StreamObjectResult wrapping the transformed stream.
 */
async function streamObject({
  model,
  schema,
  mode,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const jsonSchema = convertZodToJSONSchema(schema);
  // Resolve "auto" to the model's preferred object-generation mode.
  if (mode === "auto" || mode == null) {
    mode = model.defaultObjectGenerationMode;
  }
  let callOptions;
  let transformer;
  switch (mode) {
    case "json": {
      // Schema conveyed via the system prompt; stream text deltas.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      callOptions = {
        mode: { type: "object-json" },
        ...prepareCallSettings(settings),
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = {
        transform: (chunk, controller) => {
          switch (chunk.type) {
            case "text-delta":
              // Forward raw JSON text fragments.
              controller.enqueue(chunk.textDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case "grammar": {
      // Provider-enforced schema; stream text deltas.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      callOptions = {
        mode: { type: "object-grammar", schema: jsonSchema },
        // NOTE(review): spreads raw settings, unlike the "json" case which
        // runs prepareCallSettings(settings) — confirm this asymmetry
        // (no validation/defaults here) is intended.
        ...settings,
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = {
        transform: (chunk, controller) => {
          switch (chunk.type) {
            case "text-delta":
              controller.enqueue(chunk.textDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case "tool": {
      // Object streamed as the arguments of a forced "json" tool call.
      const validatedPrompt = getValidatedPrompt({
        system,
        prompt,
        messages
      });
      callOptions = {
        mode: {
          type: "object-tool",
          tool: {
            type: "function",
            name: "json",
            description: "Respond with a JSON object.",
            parameters: jsonSchema
          }
        },
        // NOTE(review): raw settings spread here too (see "grammar").
        ...settings,
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = {
        transform(chunk, controller) {
          switch (chunk.type) {
            case "tool-call-delta":
              // The tool-call argument text is the JSON being built.
              controller.enqueue(chunk.argsTextDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case void 0: {
      throw new Error("Model does not have a default object generation mode.");
    }
    default: {
      const _exhaustiveCheck = mode;
      throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
    }
  }
  const result = await retry(() => model.doStream(callOptions));
  return new StreamObjectResult({
    stream: result.stream.pipeThrough(new TransformStream(transformer)),
    warnings: result.warnings,
    rawResponse: result.rawResponse
  });
}
1108
+ var StreamObjectResult = class {
1109
+ constructor({
1110
+ stream,
1111
+ warnings,
1112
+ rawResponse
1113
+ }) {
1114
+ this.originalStream = stream;
1115
+ this.warnings = warnings;
1116
+ this.rawResponse = rawResponse;
1117
+ }
1118
+ get partialObjectStream() {
1119
+ let accumulatedText = "";
1120
+ let latestObject = void 0;
1121
+ return createAsyncIterableStream(this.originalStream, {
1122
+ transform(chunk, controller) {
1123
+ if (typeof chunk === "string") {
1124
+ accumulatedText += chunk;
1125
+ const currentObject = parsePartialJson(
1126
+ accumulatedText
1127
+ );
1128
+ if (!isDeepEqualData(latestObject, currentObject)) {
1129
+ latestObject = currentObject;
1130
+ controller.enqueue(currentObject);
1131
+ }
1132
+ } else if (chunk.type === "error") {
1133
+ throw chunk.error;
1134
+ }
1135
+ }
1136
+ });
1137
+ }
1138
+ get fullStream() {
1139
+ let accumulatedText = "";
1140
+ let latestObject = void 0;
1141
+ return createAsyncIterableStream(this.originalStream, {
1142
+ transform(chunk, controller) {
1143
+ if (typeof chunk === "string") {
1144
+ accumulatedText += chunk;
1145
+ const currentObject = parsePartialJson(
1146
+ accumulatedText
1147
+ );
1148
+ if (!isDeepEqualData(latestObject, currentObject)) {
1149
+ latestObject = currentObject;
1150
+ controller.enqueue({ type: "object", object: currentObject });
1151
+ }
1152
+ } else {
1153
+ switch (chunk.type) {
1154
+ case "finish":
1155
+ controller.enqueue({
1156
+ ...chunk,
1157
+ usage: calculateTokenUsage(chunk.usage)
1158
+ });
1159
+ break;
1160
+ default:
1161
+ controller.enqueue(chunk);
1162
+ break;
1163
+ }
1164
+ }
1165
+ }
1166
+ });
1167
+ }
1168
+ };
1169
+ var experimental_streamObject = streamObject;
1170
+
1171
+ // core/generate-text/tool-call.ts
1172
+ import {
1173
+ InvalidToolArgumentsError,
1174
+ NoSuchToolError
1175
+ } from "@ai-sdk/provider";
1176
+ import { safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
1177
+ function parseToolCall({
1178
+ toolCall,
1179
+ tools
1180
+ }) {
1181
+ const toolName = toolCall.toolName;
1182
+ if (tools == null) {
1183
+ throw new NoSuchToolError({ toolName: toolCall.toolName });
1184
+ }
1185
+ const tool2 = tools[toolName];
1186
+ if (tool2 == null) {
1187
+ throw new NoSuchToolError({
1188
+ toolName: toolCall.toolName,
1189
+ availableTools: Object.keys(tools)
1190
+ });
1191
+ }
1192
+ const parseResult = safeParseJSON2({
1193
+ text: toolCall.args,
1194
+ schema: tool2.parameters
1195
+ });
1196
+ if (parseResult.success === false) {
1197
+ throw new InvalidToolArgumentsError({
1198
+ toolName,
1199
+ toolArgs: toolCall.args,
1200
+ cause: parseResult.error
1201
+ });
1202
+ }
1203
+ return {
1204
+ type: "tool-call",
1205
+ toolCallId: toolCall.toolCallId,
1206
+ toolName,
1207
+ args: parseResult.value
1208
+ };
1209
+ }
1210
+
1211
+ // core/generate-text/generate-text.ts
1212
+ async function generateText({
1213
+ model,
1214
+ tools,
1215
+ system,
1216
+ prompt,
1217
+ messages,
1218
+ maxRetries,
1219
+ abortSignal,
1220
+ ...settings
1221
+ }) {
1222
+ var _a, _b;
1223
+ const retry = retryWithExponentialBackoff({ maxRetries });
1224
+ const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
1225
+ const modelResponse = await retry(() => {
1226
+ return model.doGenerate({
1227
+ mode: {
1228
+ type: "regular",
1229
+ tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
1230
+ type: "function",
1231
+ name,
1232
+ description: tool2.description,
1233
+ parameters: convertZodToJSONSchema(tool2.parameters)
1234
+ }))
1235
+ },
1236
+ ...prepareCallSettings(settings),
1237
+ inputFormat: validatedPrompt.type,
1238
+ prompt: convertToLanguageModelPrompt(validatedPrompt),
1239
+ abortSignal
1240
+ });
1241
+ });
1242
+ const toolCalls = [];
1243
+ for (const modelToolCall of (_a = modelResponse.toolCalls) != null ? _a : []) {
1244
+ toolCalls.push(parseToolCall({ toolCall: modelToolCall, tools }));
1245
+ }
1246
+ const toolResults = tools == null ? [] : await executeTools({ toolCalls, tools });
1247
+ return new GenerateTextResult({
1248
+ // Always return a string so that the caller doesn't have to check for undefined.
1249
+ // If they need to check if the model did not return any text,
1250
+ // they can check the length of the string:
1251
+ text: (_b = modelResponse.text) != null ? _b : "",
1252
+ toolCalls,
1253
+ toolResults,
1254
+ finishReason: modelResponse.finishReason,
1255
+ usage: calculateTokenUsage(modelResponse.usage),
1256
+ warnings: modelResponse.warnings,
1257
+ rawResponse: modelResponse.rawResponse,
1258
+ logprobs: modelResponse.logprobs
1259
+ });
1260
+ }
1261
+ async function executeTools({
1262
+ toolCalls,
1263
+ tools
1264
+ }) {
1265
+ const toolResults = await Promise.all(
1266
+ toolCalls.map(async (toolCall) => {
1267
+ const tool2 = tools[toolCall.toolName];
1268
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
1269
+ return void 0;
1270
+ }
1271
+ const result = await tool2.execute(toolCall.args);
1272
+ return {
1273
+ toolCallId: toolCall.toolCallId,
1274
+ toolName: toolCall.toolName,
1275
+ args: toolCall.args,
1276
+ result
1277
+ };
1278
+ })
1279
+ );
1280
+ return toolResults.filter(
1281
+ (result) => result != null
1282
+ );
1283
+ }
1284
+ var GenerateTextResult = class {
1285
+ constructor(options) {
1286
+ this.text = options.text;
1287
+ this.toolCalls = options.toolCalls;
1288
+ this.toolResults = options.toolResults;
1289
+ this.finishReason = options.finishReason;
1290
+ this.usage = options.usage;
1291
+ this.warnings = options.warnings;
1292
+ this.rawResponse = options.rawResponse;
1293
+ this.logprobs = options.logprobs;
1294
+ }
1295
+ };
1296
+ var experimental_generateText = generateText;
1297
+
1298
+ // core/generate-text/run-tools-transformation.ts
1299
+ import { NoSuchToolError as NoSuchToolError2 } from "@ai-sdk/provider";
1300
+
1301
+ // shared/generate-id.ts
1302
+ import { customAlphabet } from "nanoid/non-secure";
1303
+ var generateId = customAlphabet(
1304
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
1305
+ 7
1306
+ );
1307
+
1308
+ // core/generate-text/run-tools-transformation.ts
1309
+ function runToolsTransformation({
1310
+ tools,
1311
+ generatorStream
1312
+ }) {
1313
+ let canClose = false;
1314
+ const outstandingToolCalls = /* @__PURE__ */ new Set();
1315
+ let toolResultsStreamController = null;
1316
+ const toolResultsStream = new ReadableStream({
1317
+ start(controller) {
1318
+ toolResultsStreamController = controller;
1319
+ }
1320
+ });
1321
+ const forwardStream = new TransformStream({
1322
+ transform(chunk, controller) {
1323
+ const chunkType = chunk.type;
1324
+ switch (chunkType) {
1325
+ case "text-delta":
1326
+ case "error": {
1327
+ controller.enqueue(chunk);
1328
+ break;
1329
+ }
1330
+ case "tool-call": {
1331
+ const toolName = chunk.toolName;
1332
+ if (tools == null) {
1333
+ toolResultsStreamController.enqueue({
1334
+ type: "error",
1335
+ error: new NoSuchToolError2({ toolName: chunk.toolName })
1336
+ });
1337
+ break;
1338
+ }
1339
+ const tool2 = tools[toolName];
1340
+ if (tool2 == null) {
1341
+ toolResultsStreamController.enqueue({
1342
+ type: "error",
1343
+ error: new NoSuchToolError2({
1344
+ toolName: chunk.toolName,
1345
+ availableTools: Object.keys(tools)
1346
+ })
1347
+ });
1348
+ break;
1349
+ }
1350
+ try {
1351
+ const toolCall = parseToolCall({
1352
+ toolCall: chunk,
1353
+ tools
1354
+ });
1355
+ controller.enqueue(toolCall);
1356
+ if (tool2.execute != null) {
1357
+ const toolExecutionId = generateId();
1358
+ outstandingToolCalls.add(toolExecutionId);
1359
+ tool2.execute(toolCall.args).then(
1360
+ (result) => {
1361
+ toolResultsStreamController.enqueue({
1362
+ ...toolCall,
1363
+ type: "tool-result",
1364
+ result
1365
+ });
1366
+ outstandingToolCalls.delete(toolExecutionId);
1367
+ if (canClose && outstandingToolCalls.size === 0) {
1368
+ toolResultsStreamController.close();
1369
+ }
1370
+ },
1371
+ (error) => {
1372
+ toolResultsStreamController.enqueue({
1373
+ type: "error",
1374
+ error
1375
+ });
1376
+ outstandingToolCalls.delete(toolExecutionId);
1377
+ if (canClose && outstandingToolCalls.size === 0) {
1378
+ toolResultsStreamController.close();
1379
+ }
1380
+ }
1381
+ );
1382
+ }
1383
+ } catch (error) {
1384
+ toolResultsStreamController.enqueue({
1385
+ type: "error",
1386
+ error
1387
+ });
1388
+ }
1389
+ break;
1390
+ }
1391
+ case "finish": {
1392
+ controller.enqueue({
1393
+ type: "finish",
1394
+ finishReason: chunk.finishReason,
1395
+ logprobs: chunk.logprobs,
1396
+ usage: calculateTokenUsage(chunk.usage)
1397
+ });
1398
+ break;
1399
+ }
1400
+ case "tool-call-delta": {
1401
+ break;
1402
+ }
1403
+ default: {
1404
+ const _exhaustiveCheck = chunkType;
1405
+ throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
1406
+ }
1407
+ }
1408
+ },
1409
+ flush() {
1410
+ canClose = true;
1411
+ if (outstandingToolCalls.size === 0) {
1412
+ toolResultsStreamController.close();
1413
+ }
1414
+ }
1415
+ });
1416
+ return new ReadableStream({
1417
+ async start(controller) {
1418
+ return Promise.all([
1419
+ generatorStream.pipeThrough(forwardStream).pipeTo(
1420
+ new WritableStream({
1421
+ write(chunk) {
1422
+ controller.enqueue(chunk);
1423
+ },
1424
+ close() {
1425
+ }
1426
+ })
1427
+ ),
1428
+ toolResultsStream.pipeTo(
1429
+ new WritableStream({
1430
+ write(chunk) {
1431
+ controller.enqueue(chunk);
1432
+ },
1433
+ close() {
1434
+ controller.close();
1435
+ }
1436
+ })
1437
+ )
1438
+ ]);
1439
+ }
1440
+ });
1441
+ }
1442
+
1443
+ // core/generate-text/stream-text.ts
1444
+ async function streamText({
1445
+ model,
1446
+ tools,
1447
+ system,
1448
+ prompt,
1449
+ messages,
1450
+ maxRetries,
1451
+ abortSignal,
1452
+ ...settings
1453
+ }) {
1454
+ const retry = retryWithExponentialBackoff({ maxRetries });
1455
+ const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
1456
+ const { stream, warnings, rawResponse } = await retry(
1457
+ () => model.doStream({
1458
+ mode: {
1459
+ type: "regular",
1460
+ tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
1461
+ type: "function",
1462
+ name,
1463
+ description: tool2.description,
1464
+ parameters: convertZodToJSONSchema(tool2.parameters)
1465
+ }))
1466
+ },
1467
+ ...prepareCallSettings(settings),
1468
+ inputFormat: validatedPrompt.type,
1469
+ prompt: convertToLanguageModelPrompt(validatedPrompt),
1470
+ abortSignal
1471
+ })
1472
+ );
1473
+ return new StreamTextResult({
1474
+ stream: runToolsTransformation({
1475
+ tools,
1476
+ generatorStream: stream
1477
+ }),
1478
+ warnings,
1479
+ rawResponse
1480
+ });
1481
+ }
1482
+ async function streamResponse({
1483
+ model,
1484
+ tools,
1485
+ system,
1486
+ prompt,
1487
+ messages,
1488
+ maxRetries,
1489
+ abortSignal,
1490
+ ...settings
1491
+ }) {
1492
+ const retry = retryWithExponentialBackoff({ maxRetries });
1493
+ const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
1494
+ const { stream, warnings, rawResponse } = await retry(() => {
1495
+ if (!model.doRawStream) {
1496
+ throw new Error("The model does not support raw streaming.");
1497
+ }
1498
+ return model.doRawStream({
1499
+ mode: {
1500
+ type: "regular",
1501
+ tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
1502
+ type: "function",
1503
+ name,
1504
+ description: tool2.description,
1505
+ parameters: convertZodToJSONSchema(tool2.parameters)
1506
+ }))
1507
+ },
1508
+ ...prepareCallSettings(settings),
1509
+ inputFormat: validatedPrompt.type,
1510
+ prompt: convertToLanguageModelPrompt(validatedPrompt),
1511
+ abortSignal
1512
+ });
1513
+ });
1514
+ return { stream, warnings, rawResponse };
1515
+ }
1516
+ var StreamTextResult = class {
1517
+ constructor({
1518
+ stream,
1519
+ warnings,
1520
+ rawResponse
1521
+ }) {
1522
+ this.warnings = warnings;
1523
+ this.rawResponse = rawResponse;
1524
+ let resolveUsage;
1525
+ this.usage = new Promise((resolve) => {
1526
+ resolveUsage = resolve;
1527
+ });
1528
+ let resolveFinishReason;
1529
+ this.finishReason = new Promise((resolve) => {
1530
+ resolveFinishReason = resolve;
1531
+ });
1532
+ this.originalStream = stream.pipeThrough(
1533
+ new TransformStream({
1534
+ async transform(chunk, controller) {
1535
+ controller.enqueue(chunk);
1536
+ if (chunk.type === "finish") {
1537
+ resolveUsage(chunk.usage);
1538
+ resolveFinishReason(chunk.finishReason);
1539
+ }
1540
+ }
1541
+ })
1542
+ );
1543
+ }
1544
+ /**
1545
+ Split out a new stream from the original stream.
1546
+ The original stream is replaced to allow for further splitting,
1547
+ since we do not know how many times the stream will be split.
1548
+
1549
+ Note: this leads to buffering the stream content on the server.
1550
+ However, the LLM results are expected to be small enough to not cause issues.
1551
+ */
1552
+ teeStream() {
1553
+ const [stream1, stream2] = this.originalStream.tee();
1554
+ this.originalStream = stream2;
1555
+ return stream1;
1556
+ }
1557
+ /**
1558
+ A text stream that returns only the generated text deltas. You can use it
1559
+ as either an AsyncIterable or a ReadableStream. When an error occurs, the
1560
+ stream will throw the error.
1561
+ */
1562
+ get textStream() {
1563
+ return createAsyncIterableStream(this.teeStream(), {
1564
+ transform(chunk, controller) {
1565
+ if (chunk.type === "text-delta") {
1566
+ if (chunk.textDelta.length > 0) {
1567
+ controller.enqueue(chunk.textDelta);
1568
+ }
1569
+ } else if (chunk.type === "error") {
1570
+ throw chunk.error;
1571
+ }
1572
+ }
1573
+ });
1574
+ }
1575
+ /**
1576
+ A stream with all events, including text deltas, tool calls, tool results, and
1577
+ errors.
1578
+ You can use it as either an AsyncIterable or a ReadableStream. When an error occurs, the
1579
+ stream will throw the error.
1580
+ */
1581
+ get fullStream() {
1582
+ return createAsyncIterableStream(this.teeStream(), {
1583
+ transform(chunk, controller) {
1584
+ if (chunk.type === "text-delta") {
1585
+ if (chunk.textDelta.length > 0) {
1586
+ controller.enqueue(chunk);
1587
+ }
1588
+ } else {
1589
+ controller.enqueue(chunk);
1590
+ }
1591
+ }
1592
+ });
1593
+ }
1594
+ /**
1595
+ Converts the result to an `AIStream` object that is compatible with `StreamingTextResponse`.
1596
+ It can be used with the `useChat` and `useCompletion` hooks.
1597
+
1598
+ @param callbacks
1599
+ Stream callbacks that will be called when the stream emits events.
1600
+
1601
+ @returns an `AIStream` object.
1602
+ */
1603
+ toAIStream(callbacks = {}) {
1604
+ let aggregatedResponse = "";
1605
+ const callbackTransformer = new TransformStream({
1606
+ async start() {
1607
+ if (callbacks.onStart)
1608
+ await callbacks.onStart();
1609
+ },
1610
+ async transform(chunk, controller) {
1611
+ controller.enqueue(chunk);
1612
+ if (chunk.type === "text-delta") {
1613
+ const textDelta = chunk.textDelta;
1614
+ aggregatedResponse += textDelta;
1615
+ if (callbacks.onToken)
1616
+ await callbacks.onToken(textDelta);
1617
+ if (callbacks.onText)
1618
+ await callbacks.onText(textDelta);
1619
+ }
1620
+ },
1621
+ async flush() {
1622
+ if (callbacks.onCompletion)
1623
+ await callbacks.onCompletion(aggregatedResponse);
1624
+ if (callbacks.onFinal)
1625
+ await callbacks.onFinal(aggregatedResponse);
1626
+ }
1627
+ });
1628
+ const streamDataTransformer = new TransformStream({
1629
+ transform: async (chunk, controller) => {
1630
+ switch (chunk.type) {
1631
+ case "text-delta":
1632
+ controller.enqueue(formatStreamPart("text", chunk.textDelta));
1633
+ break;
1634
+ case "tool-call":
1635
+ controller.enqueue(
1636
+ formatStreamPart("tool_call", {
1637
+ toolCallId: chunk.toolCallId,
1638
+ toolName: chunk.toolName,
1639
+ args: chunk.args
1640
+ })
1641
+ );
1642
+ break;
1643
+ case "tool-result":
1644
+ controller.enqueue(
1645
+ formatStreamPart("tool_result", {
1646
+ toolCallId: chunk.toolCallId,
1647
+ toolName: chunk.toolName,
1648
+ args: chunk.args,
1649
+ result: chunk.result
1650
+ })
1651
+ );
1652
+ break;
1653
+ case "error":
1654
+ controller.enqueue(
1655
+ formatStreamPart("error", JSON.stringify(chunk.error))
1656
+ );
1657
+ break;
1658
+ }
1659
+ }
1660
+ });
1661
+ return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamDataTransformer).pipeThrough(new TextEncoderStream());
1662
+ }
1663
+ /**
1664
+ Writes stream data output to a Node.js response-like object.
1665
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1666
+ writes each stream data part as a separate chunk.
1667
+
1668
+ @param response A Node.js response-like object (ServerResponse).
1669
+ @param init Optional headers and status code.
1670
+ */
1671
+ pipeAIStreamToResponse(response, init) {
1672
+ var _a;
1673
+ response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
1674
+ "Content-Type": "text/plain; charset=utf-8",
1675
+ ...init == null ? void 0 : init.headers
1676
+ });
1677
+ const reader = this.toAIStream().getReader();
1678
+ const read = async () => {
1679
+ try {
1680
+ while (true) {
1681
+ const { done, value } = await reader.read();
1682
+ if (done)
1683
+ break;
1684
+ response.write(value);
1685
+ }
1686
+ } catch (error) {
1687
+ throw error;
1688
+ } finally {
1689
+ response.end();
1690
+ }
1691
+ };
1692
+ read();
1693
+ }
1694
+ /**
1695
+ Writes text delta output to a Node.js response-like object.
1696
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1697
+ writes each text delta as a separate chunk.
1698
+
1699
+ @param response A Node.js response-like object (ServerResponse).
1700
+ @param init Optional headers and status code.
1701
+ */
1702
+ pipeTextStreamToResponse(response, init) {
1703
+ var _a;
1704
+ response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
1705
+ "Content-Type": "text/plain; charset=utf-8",
1706
+ ...init == null ? void 0 : init.headers
1707
+ });
1708
+ const reader = this.textStream.pipeThrough(new TextEncoderStream()).getReader();
1709
+ const read = async () => {
1710
+ try {
1711
+ while (true) {
1712
+ const { done, value } = await reader.read();
1713
+ if (done)
1714
+ break;
1715
+ response.write(value);
1716
+ }
1717
+ } catch (error) {
1718
+ throw error;
1719
+ } finally {
1720
+ response.end();
1721
+ }
1722
+ };
1723
+ read();
1724
+ }
1725
+ /**
1726
+ Converts the result to a streamed response object with a stream data part stream.
1727
+ It can be used with the `useChat` and `useCompletion` hooks.
1728
+
1729
+ @param init Optional headers.
1730
+
1731
+ @return A response object.
1732
+ */
1733
+ toAIStreamResponse(init) {
1734
+ return new StreamingTextResponse(this.toAIStream(), init);
1735
+ }
1736
+ /**
1737
+ Creates a simple text stream response.
1738
+ Each text delta is encoded as UTF-8 and sent as a separate chunk.
1739
+ Non-text-delta events are ignored.
1740
+
1741
+ @param init Optional headers and status code.
1742
+ */
1743
+ toTextStreamResponse(init) {
1744
+ var _a;
1745
+ return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
1746
+ status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
1747
+ headers: {
1748
+ "Content-Type": "text/plain; charset=utf-8",
1749
+ ...init == null ? void 0 : init.headers
1750
+ }
1751
+ });
1752
+ }
1753
+ };
1754
+ var experimental_streamText = streamText;
1755
+
1756
+ // core/prompt/convert-to-core-messages.ts
1757
+ function convertToCoreMessages(messages) {
1758
+ const coreMessages = [];
1759
+ for (const { role, content, toolInvocations } of messages) {
1760
+ switch (role) {
1761
+ case "user": {
1762
+ coreMessages.push({ role: "user", content });
1763
+ break;
1764
+ }
1765
+ case "assistant": {
1766
+ if (toolInvocations == null) {
1767
+ coreMessages.push({ role: "assistant", content });
1768
+ break;
1769
+ }
1770
+ coreMessages.push({
1771
+ role: "assistant",
1772
+ content: [
1773
+ { type: "text", text: content },
1774
+ ...toolInvocations.map(({ toolCallId, toolName, args }) => ({
1775
+ type: "tool-call",
1776
+ toolCallId,
1777
+ toolName,
1778
+ args
1779
+ }))
1780
+ ]
1781
+ });
1782
+ coreMessages.push({
1783
+ role: "tool",
1784
+ content: toolInvocations.map(
1785
+ ({ toolCallId, toolName, args, result }) => ({
1786
+ type: "tool-result",
1787
+ toolCallId,
1788
+ toolName,
1789
+ args,
1790
+ result
1791
+ })
1792
+ )
1793
+ });
1794
+ break;
1795
+ }
1796
+ default: {
1797
+ const _exhaustiveCheck = role;
1798
+ throw new Error(`Unhandled role: ${_exhaustiveCheck}`);
1799
+ }
1800
+ }
1801
+ }
1802
+ return coreMessages;
1803
+ }
1804
+
1805
+ // core/tool/tool.ts
1806
+ function tool(tool2) {
1807
+ return tool2;
1808
+ }
1809
+
1810
+ // core/types/errors.ts
1811
+ import {
1812
+ APICallError as APICallError2,
1813
+ EmptyResponseBodyError,
1814
+ InvalidArgumentError as InvalidArgumentError2,
1815
+ InvalidDataContentError as InvalidDataContentError2,
1816
+ InvalidPromptError as InvalidPromptError2,
1817
+ InvalidResponseDataError,
1818
+ InvalidToolArgumentsError as InvalidToolArgumentsError2,
1819
+ JSONParseError,
1820
+ LoadAPIKeyError,
1821
+ NoObjectGeneratedError as NoObjectGeneratedError2,
1822
+ NoSuchToolError as NoSuchToolError3,
1823
+ RetryError as RetryError2,
1824
+ ToolCallParseError,
1825
+ TypeValidationError,
1826
+ UnsupportedFunctionalityError,
1827
+ UnsupportedJSONSchemaError
1828
+ } from "@ai-sdk/provider";
1829
+
1830
+ // shared/stream-parts.ts
1831
+ var textStreamPart = {
1832
+ code: "0",
1833
+ name: "text",
1834
+ parse: (value) => {
1835
+ if (typeof value !== "string") {
1836
+ throw new Error('"text" parts expect a string value.');
1837
+ }
1838
+ return { type: "text", value };
1839
+ }
1840
+ };
1841
+ var functionCallStreamPart = {
1842
+ code: "1",
1843
+ name: "function_call",
1844
+ parse: (value) => {
1845
+ if (value == null || typeof value !== "object" || !("function_call" in value) || typeof value.function_call !== "object" || value.function_call == null || !("name" in value.function_call) || !("arguments" in value.function_call) || typeof value.function_call.name !== "string" || typeof value.function_call.arguments !== "string") {
1846
+ throw new Error(
1847
+ '"function_call" parts expect an object with a "function_call" property.'
1848
+ );
1849
+ }
1850
+ return {
1851
+ type: "function_call",
1852
+ value
1853
+ };
1854
+ }
1855
+ };
1856
+ var dataStreamPart = {
1857
+ code: "2",
1858
+ name: "data",
1859
+ parse: (value) => {
1860
+ if (!Array.isArray(value)) {
1861
+ throw new Error('"data" parts expect an array value.');
1862
+ }
1863
+ return { type: "data", value };
1864
+ }
1865
+ };
1866
+ var errorStreamPart = {
1867
+ code: "3",
1868
+ name: "error",
1869
+ parse: (value) => {
1870
+ if (typeof value !== "string") {
1871
+ throw new Error('"error" parts expect a string value.');
1872
+ }
1873
+ return { type: "error", value };
1874
+ }
1875
+ };
1876
+ var assistantMessageStreamPart = {
1877
+ code: "4",
1878
+ name: "assistant_message",
1879
+ parse: (value) => {
1880
+ if (value == null || typeof value !== "object" || !("id" in value) || !("role" in value) || !("content" in value) || typeof value.id !== "string" || typeof value.role !== "string" || value.role !== "assistant" || !Array.isArray(value.content) || !value.content.every(
1881
+ (item) => item != null && typeof item === "object" && "type" in item && item.type === "text" && "text" in item && item.text != null && typeof item.text === "object" && "value" in item.text && typeof item.text.value === "string"
1882
+ )) {
1883
+ throw new Error(
1884
+ '"assistant_message" parts expect an object with an "id", "role", and "content" property.'
1885
+ );
1886
+ }
1887
+ return {
1888
+ type: "assistant_message",
1889
+ value
1890
+ };
1891
+ }
1892
+ };
1893
+ var assistantControlDataStreamPart = {
1894
+ code: "5",
1895
+ name: "assistant_control_data",
1896
+ parse: (value) => {
1897
+ if (value == null || typeof value !== "object" || !("threadId" in value) || !("messageId" in value) || typeof value.threadId !== "string" || typeof value.messageId !== "string") {
1898
+ throw new Error(
1899
+ '"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
1900
+ );
1901
+ }
1902
+ return {
1903
+ type: "assistant_control_data",
1904
+ value: {
1905
+ threadId: value.threadId,
1906
+ messageId: value.messageId
1907
+ }
1908
+ };
1909
+ }
1910
+ };
1911
+ var dataMessageStreamPart = {
1912
+ code: "6",
1913
+ name: "data_message",
1914
+ parse: (value) => {
1915
+ if (value == null || typeof value !== "object" || !("role" in value) || !("data" in value) || typeof value.role !== "string" || value.role !== "data") {
1916
+ throw new Error(
1917
+ '"data_message" parts expect an object with a "role" and "data" property.'
1918
+ );
1919
+ }
1920
+ return {
1921
+ type: "data_message",
1922
+ value
1923
+ };
1924
+ }
1925
+ };
1926
+ var toolCallsStreamPart = {
1927
+ code: "7",
1928
+ name: "tool_calls",
1929
+ parse: (value) => {
1930
+ if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some(
1931
+ (tc) => tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string"
1932
+ )) {
1933
+ throw new Error(
1934
+ '"tool_calls" parts expect an object with a ToolCallPayload.'
1935
+ );
1936
+ }
1937
+ return {
1938
+ type: "tool_calls",
1939
+ value
1940
+ };
1941
+ }
1942
+ };
1943
+ var messageAnnotationsStreamPart = {
1944
+ code: "8",
1945
+ name: "message_annotations",
1946
+ parse: (value) => {
1947
+ if (!Array.isArray(value)) {
1948
+ throw new Error('"message_annotations" parts expect an array value.');
1949
+ }
1950
+ return { type: "message_annotations", value };
1951
+ }
1952
+ };
1953
+ var toolCallStreamPart = {
1954
+ code: "9",
1955
+ name: "tool_call",
1956
+ parse: (value) => {
1957
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
1958
+ throw new Error(
1959
+ '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
1960
+ );
1961
+ }
1962
+ return {
1963
+ type: "tool_call",
1964
+ value
1965
+ };
1966
+ }
1967
+ };
1968
+ var toolResultStreamPart = {
1969
+ code: "a",
1970
+ name: "tool_result",
1971
+ parse: (value) => {
1972
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object" || !("result" in value)) {
1973
+ throw new Error(
1974
+ '"tool_result" parts expect an object with a "toolCallId", "toolName", "args", and "result" property.'
1975
+ );
1976
+ }
1977
+ return {
1978
+ type: "tool_result",
1979
+ value
1980
+ };
1981
+ }
1982
+ };
1983
+ var streamParts = [
1984
+ textStreamPart,
1985
+ functionCallStreamPart,
1986
+ dataStreamPart,
1987
+ errorStreamPart,
1988
+ assistantMessageStreamPart,
1989
+ assistantControlDataStreamPart,
1990
+ dataMessageStreamPart,
1991
+ toolCallsStreamPart,
1992
+ messageAnnotationsStreamPart,
1993
+ toolCallStreamPart,
1994
+ toolResultStreamPart
1995
+ ];
1996
+ var streamPartsByCode = {
1997
+ [textStreamPart.code]: textStreamPart,
1998
+ [functionCallStreamPart.code]: functionCallStreamPart,
1999
+ [dataStreamPart.code]: dataStreamPart,
2000
+ [errorStreamPart.code]: errorStreamPart,
2001
+ [assistantMessageStreamPart.code]: assistantMessageStreamPart,
2002
+ [assistantControlDataStreamPart.code]: assistantControlDataStreamPart,
2003
+ [dataMessageStreamPart.code]: dataMessageStreamPart,
2004
+ [toolCallsStreamPart.code]: toolCallsStreamPart,
2005
+ [messageAnnotationsStreamPart.code]: messageAnnotationsStreamPart,
2006
+ [toolCallStreamPart.code]: toolCallStreamPart,
2007
+ [toolResultStreamPart.code]: toolResultStreamPart
2008
+ };
2009
+ var StreamStringPrefixes = {
2010
+ [textStreamPart.name]: textStreamPart.code,
2011
+ [functionCallStreamPart.name]: functionCallStreamPart.code,
2012
+ [dataStreamPart.name]: dataStreamPart.code,
2013
+ [errorStreamPart.name]: errorStreamPart.code,
2014
+ [assistantMessageStreamPart.name]: assistantMessageStreamPart.code,
2015
+ [assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,
2016
+ [dataMessageStreamPart.name]: dataMessageStreamPart.code,
2017
+ [toolCallsStreamPart.name]: toolCallsStreamPart.code,
2018
+ [messageAnnotationsStreamPart.name]: messageAnnotationsStreamPart.code,
2019
+ [toolCallStreamPart.name]: toolCallStreamPart.code,
2020
+ [toolResultStreamPart.name]: toolResultStreamPart.code
2021
+ };
2022
+ var validCodes = streamParts.map((part) => part.code);
2023
+ var parseStreamPart = (line) => {
2024
+ const firstSeparatorIndex = line.indexOf(":");
2025
+ if (firstSeparatorIndex === -1) {
2026
+ throw new Error("Failed to parse stream string. No separator found.");
2027
+ }
2028
+ const prefix = line.slice(0, firstSeparatorIndex);
2029
+ if (!validCodes.includes(prefix)) {
2030
+ throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
2031
+ }
2032
+ const code = prefix;
2033
+ const textValue = line.slice(firstSeparatorIndex + 1);
2034
+ const jsonValue = JSON.parse(textValue);
2035
+ return streamPartsByCode[code].parse(jsonValue);
2036
+ };
2037
+ function formatStreamPart(type, value) {
2038
+ const streamPart = streamParts.find((part) => part.name === type);
2039
+ if (!streamPart) {
2040
+ throw new Error(`Invalid stream part type: ${type}`);
2041
+ }
2042
+ return `${streamPart.code}:${JSON.stringify(value)}
2043
+ `;
2044
+ }
2045
+
2046
+ // shared/read-data-stream.ts
2047
+ var NEWLINE = "\n".charCodeAt(0);
2048
+ function concatChunks(chunks, totalLength) {
2049
+ const concatenatedChunks = new Uint8Array(totalLength);
2050
+ let offset = 0;
2051
+ for (const chunk of chunks) {
2052
+ concatenatedChunks.set(chunk, offset);
2053
+ offset += chunk.length;
2054
+ }
2055
+ chunks.length = 0;
2056
+ return concatenatedChunks;
2057
+ }
2058
+ async function* readDataStream(reader, {
2059
+ isAborted
2060
+ } = {}) {
2061
+ const decoder = new TextDecoder();
2062
+ const chunks = [];
2063
+ let totalLength = 0;
2064
+ while (true) {
2065
+ const { value } = await reader.read();
2066
+ if (value) {
2067
+ chunks.push(value);
2068
+ totalLength += value.length;
2069
+ if (value[value.length - 1] !== NEWLINE) {
2070
+ continue;
2071
+ }
2072
+ }
2073
+ if (chunks.length === 0) {
2074
+ break;
2075
+ }
2076
+ const concatenatedChunks = concatChunks(chunks, totalLength);
2077
+ totalLength = 0;
2078
+ const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
2079
+ for (const streamPart of streamParts2) {
2080
+ yield streamPart;
2081
+ }
2082
+ if (isAborted == null ? void 0 : isAborted()) {
2083
+ reader.cancel();
2084
+ break;
2085
+ }
2086
+ }
2087
+ }
2088
+
2089
+ // shared/utils.ts
2090
/**
 * Creates a decoder for raw stream chunks.
 * @param {boolean} [complex] - When true, each decoded chunk is split into
 *   newline-separated lines and parsed as stream parts; when false/omitted,
 *   chunks are decoded to plain text (empty string for a missing chunk).
 * @returns {(chunk?: Uint8Array) => string | object[]} The chunk decoder.
 */
function createChunkDecoder(complex) {
  const decoder = new TextDecoder();
  if (complex) {
    return (chunk) => decoder
      .decode(chunk, { stream: true })
      .split("\n")
      .filter((line) => line !== "")
      .map(parseStreamPart)
      .filter(Boolean);
  }
  return (chunk) => (chunk ? decoder.decode(chunk, { stream: true }) : "");
}
2104
+ var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
2105
+
2106
+ // streams/ai-stream.ts
2107
+ import {
2108
+ createParser
2109
+ } from "eventsource-parser";
2110
/**
 * Creates a TransformStream that decodes SSE bytes and emits parsed messages.
 * Terminates on the OpenAI `[DONE]` sentinel or a Replicate `done` event.
 * @param {(data: string, options: { event?: string }) => any} [customParser]
 *   Optional parser applied to each event's data; falsy results are dropped.
 */
function createEventStreamTransformer(customParser) {
  const textDecoder = new TextDecoder();
  let eventSourceParser;
  return new TransformStream({
    async start(controller) {
      eventSourceParser = createParser(
        (event) => {
          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
          // @see https://replicate.com/docs/streaming
          event.event === "done") {
            controller.terminate();
            return;
          }
          if ("data" in event) {
            const parsedMessage = customParser ? customParser(event.data, {
              event: event.event
            }) : event.data;
            if (parsedMessage)
              controller.enqueue(parsedMessage);
          }
        }
      );
    },
    transform(chunk) {
      // Fix: use streaming decode so multi-byte UTF-8 sequences that are
      // split across chunk boundaries are not corrupted into U+FFFD.
      eventSourceParser.feed(textDecoder.decode(chunk, { stream: true }));
    }
  });
}
2138
/**
 * Creates a TransformStream that encodes incoming messages to UTF-8 bytes
 * while firing lifecycle callbacks (onStart, onToken, onText, onCompletion,
 * onFinal). For OpenAI-style callbacks (detected by
 * `experimental_onFunctionCall`), onFinal is deferred to the function-call
 * transformer and is not fired here.
 */
function createCallbacksTransformer(cb) {
  const encoder = new TextEncoder();
  const callbacks = cb || {};
  let fullResponse = "";
  return new TransformStream({
    async start() {
      if (callbacks.onStart) {
        await callbacks.onStart();
      }
    },
    async transform(message, controller) {
      const isPlainText = typeof message === "string";
      const content = isPlainText ? message : message.content;
      controller.enqueue(encoder.encode(content));
      fullResponse += content;
      if (callbacks.onToken) {
        await callbacks.onToken(content);
      }
      if (callbacks.onText && isPlainText) {
        await callbacks.onText(message);
      }
    },
    async flush() {
      const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
      if (callbacks.onCompletion) {
        await callbacks.onCompletion(fullResponse);
      }
      if (callbacks.onFinal && !isOpenAICallbacks) {
        await callbacks.onFinal(fullResponse);
      }
    }
  });
}
2168
/**
 * Type guard: detects OpenAI-specific callback objects by the presence of the
 * `experimental_onFunctionCall` property (own or inherited, matching `in`).
 */
function isOfTypeOpenAIStreamCallbacks(callbacks) {
  return Reflect.has(callbacks, "experimental_onFunctionCall");
}
2171
/**
 * Returns a function that strips leading whitespace from the very start of a
 * stream: it trims every chunk until the first non-empty text is produced,
 * after which all chunks pass through untouched.
 */
function trimStartOfStreamHelper() {
  let awaitingFirstText = true;
  return (text) => {
    if (!awaitingFirstText) {
      return text;
    }
    const trimmed = text.trimStart();
    if (trimmed.length > 0) {
      awaitingFirstText = false;
    }
    return trimmed;
  };
}
2182
/**
 * Converts a fetch Response carrying an SSE body into a ReadableStream of
 * callback-processed text. For non-ok responses, the returned stream errors
 * with the response body's text (or a "No response body" error).
 * @param {Response} response - Fetch response to adapt.
 * @param {Function} [customParser] - Per-event data parser (see
 *   createEventStreamTransformer).
 * @param {object} [callbacks] - Lifecycle callbacks (see
 *   createCallbacksTransformer).
 */
function AIStream(response, customParser, callbacks) {
  if (!response.ok) {
    if (!response.body) {
      return new ReadableStream({
        start(controller) {
          controller.error(new Error("Response error: No response body"));
        }
      });
    }
    // Surface the server's error payload through the stream error.
    const reader = response.body.getReader();
    return new ReadableStream({
      async start(controller) {
        const { done, value } = await reader.read();
        if (!done) {
          const errorText = new TextDecoder().decode(value);
          controller.error(new Error(`Response error: ${errorText}`));
        }
      }
    });
  }
  const body = response.body || createEmptyReadableStream();
  return body
    .pipeThrough(createEventStreamTransformer(customParser))
    .pipeThrough(createCallbacksTransformer(callbacks));
}
2206
/** Returns a ReadableStream that closes immediately without emitting data. */
function createEmptyReadableStream() {
  return new ReadableStream({
    start: (controller) => controller.close()
  });
}
2213
/**
 * Wraps an async iterable as a ReadableStream. Cancelling the stream forwards
 * the cancellation reason to the iterator's `return` method, if it has one.
 */
function readableFromAsyncIterable(iterable) {
  const iterator = iterable[Symbol.asyncIterator]();
  return new ReadableStream({
    async pull(controller) {
      const step = await iterator.next();
      if (step.done) {
        controller.close();
      } else {
        controller.enqueue(step.value);
      }
    },
    async cancel(reason) {
      if (iterator.return != null) {
        await iterator.return(reason);
      }
    }
  });
}
2229
+
2230
+ // streams/stream-data.ts
2231
/**
 * A manually-controlled data stream: values appended via `append` /
 * `appendMessageAnnotation` are encoded as data-protocol parts and pushed
 * into `this.stream`. The caller must call `close()` when done.
 */
var StreamData = class {
  constructor() {
    this.encoder = new TextEncoder();
    // Set once the ReadableStream starts; null until then.
    this.controller = null;
    this.isClosed = false;
    this.warningTimeout = null;
    const self = this;
    this.stream = new ReadableStream({
      start: async (controller) => {
        self.controller = controller;
        // Dev-only guard: warn if the stream is never closed.
        if (process.env.NODE_ENV === "development") {
          self.warningTimeout = setTimeout(() => {
            console.warn(
              "The data stream is hanging. Did you forget to close it with `data.close()`?"
            );
          }, 3e3);
        }
      },
      pull: (controller) => {
      },
      cancel: (reason) => {
        // Consumer cancelled: further append/close calls will throw.
        this.isClosed = true;
      }
    });
  }
  /**
   * Closes the underlying stream and clears the dev warning timer.
   * @throws {Error} If already closed or the controller never initialized.
   */
  async close() {
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    if (!this.controller) {
      throw new Error("Stream controller is not initialized.");
    }
    this.controller.close();
    this.isClosed = true;
    if (this.warningTimeout) {
      clearTimeout(this.warningTimeout);
    }
  }
  /**
   * Appends a value as a "data" stream part (wrapped in a one-element array).
   * @throws {Error} If the stream is closed or not yet initialized.
   */
  append(value) {
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    if (!this.controller) {
      throw new Error("Stream controller is not initialized.");
    }
    this.controller.enqueue(
      this.encoder.encode(formatStreamPart("data", [value]))
    );
  }
  /**
   * Appends a value as a "message_annotations" stream part.
   * @throws {Error} If the stream is closed or not yet initialized.
   */
  appendMessageAnnotation(value) {
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    if (!this.controller) {
      throw new Error("Stream controller is not initialized.");
    }
    this.controller.enqueue(
      this.encoder.encode(formatStreamPart("message_annotations", [value]))
    );
  }
};
2292
/**
 * Creates a TransformStream that re-encodes raw text chunks as "text" stream
 * parts of the data stream protocol.
 */
function createStreamDataTransformer() {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();
  return new TransformStream({
    transform: async (chunk, controller) => {
      // Fix: streaming decode prevents corruption of multi-byte characters
      // that span chunk boundaries (the decoder instance is shared across
      // all chunks of this stream).
      const message = decoder.decode(chunk, { stream: true });
      controller.enqueue(encoder.encode(formatStreamPart("text", message)));
    }
  });
}
2302
// Deprecated alias kept for backwards compatibility with older imports.
var experimental_StreamData = class extends StreamData {
};
2304
+
2305
+ // streams/anthropic-stream.ts
2306
/**
 * Creates a parser for Anthropic completion SSE data. Anthropic sends the
 * entire completion so far on each event; the parser returns only the
 * newly-added suffix (delta) relative to the previous event. If the new text
 * is not a strict extension of the previous one, the full text is returned.
 * @throws {Error} If the payload contains an `error` field.
 */
function parseAnthropicStream() {
  let seen = "";
  return (data) => {
    const json = JSON.parse(data);
    if ("error" in json) {
      throw new Error(`${json.error.type}: ${json.error.message}`);
    }
    if (!("completion" in json)) {
      return;
    }
    const text = json.completion;
    const extendsPrevious = text.length > seen.length && text.startsWith(seen);
    if (!seen || extendsPrevious) {
      const delta = text.slice(seen.length);
      seen = text;
      return delta;
    }
    return text;
  };
}
2325
/**
 * Adapts an Anthropic SDK async iterable into a stream of plain text pieces.
 * Handles both legacy completion chunks (`chunk.completion`) and messages-API
 * delta chunks (`chunk.delta.text`); empty strings are skipped.
 */
async function* streamable(stream) {
  for await (const chunk of stream) {
    let text;
    if ("completion" in chunk) {
      text = chunk.completion;
    } else if ("delta" in chunk && "text" in chunk.delta) {
      text = chunk.delta.text;
    }
    if (text) {
      yield text;
    }
  }
}
2341
/**
 * Builds a data-protocol ReadableStream from an Anthropic response, which may
 * be either an SDK async iterable or a fetch Response with an SSE body.
 */
function AnthropicStream(res, cb) {
  const source = Symbol.asyncIterator in res
    ? readableFromAsyncIterable(streamable(res)).pipeThrough(
        createCallbacksTransformer(cb)
      )
    : AIStream(res, parseAnthropicStream(), cb);
  return source.pipeThrough(createStreamDataTransformer());
}
2350
+
2351
+ // streams/assistant-response.ts
2352
/**
 * Builds a streaming HTTP Response for the OpenAI Assistants protocol.
 * Emits an initial "assistant_control_data" part (thread/message ids), then
 * lets `process2` push messages, data messages, errors, and forwarded run
 * events into the stream; the stream is closed when `process2` settles.
 *
 * @param {{ threadId: string, messageId: string }} ids - Control data sent first.
 * @param {Function} process2 - Async handler receiving the send helpers.
 * @returns {Response} A 200 text/plain streaming response.
 */
function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
    async start(controller) {
      var _a;
      const textEncoder = new TextEncoder();
      // Enqueue a full assistant message part.
      const sendMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("assistant_message", message))
        );
      };
      // Enqueue an application-defined data message part.
      const sendDataMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("data_message", message))
        );
      };
      // Enqueue an error part (used for failures thrown by process2).
      const sendError = (errorMessage) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("error", errorMessage))
        );
      };
      // Forwards an Assistants run event stream into this response, returning
      // the final run data for "completed" / "requires_action" events.
      const forwardStream = async (stream2) => {
        var _a2, _b;
        let result = void 0;
        for await (const value of stream2) {
          switch (value.event) {
            case "thread.message.created": {
              // New message: emit an empty assistant message shell; deltas follow.
              controller.enqueue(
                textEncoder.encode(
                  formatStreamPart("assistant_message", {
                    id: value.data.id,
                    role: "assistant",
                    content: [{ type: "text", text: { value: "" } }]
                  })
                )
              );
              break;
            }
            case "thread.message.delta": {
              const content = (_a2 = value.data.delta.content) == null ? void 0 : _a2[0];
              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                controller.enqueue(
                  textEncoder.encode(
                    formatStreamPart("text", content.text.value)
                  )
                );
              }
              break;
            }
            case "thread.run.completed":
            case "thread.run.requires_action": {
              result = value.data;
              break;
            }
          }
        }
        return result;
      };
      // Send control data first so the client can associate the stream
      // with its thread/message before any content arrives.
      controller.enqueue(
        textEncoder.encode(
          formatStreamPart("assistant_control_data", {
            threadId,
            messageId
          })
        )
      );
      try {
        await process2({
          threadId,
          messageId,
          sendMessage,
          sendDataMessage,
          forwardStream
        });
      } catch (error) {
        sendError((_a = error.message) != null ? _a : `${error}`);
      } finally {
        controller.close();
      }
    },
    pull(controller) {
    },
    cancel() {
    }
  });
  return new Response(stream, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8"
    }
  });
}
// Deprecated alias kept for backwards compatibility with older imports.
var experimental_AssistantResponse = AssistantResponse;
2444
+
2445
+ // streams/aws-bedrock-stream.ts
2446
/**
 * Iterates an AWS Bedrock streaming response, JSON-decoding each event's
 * `chunk.bytes` payload and yielding the text delta produced by
 * `extractTextDeltaFromChunk` (nullish deltas are skipped).
 */
async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
  const decoder = new TextDecoder();
  const events = response.body != null ? response.body : [];
  for await (const event of events) {
    const bytes = event.chunk == null ? void 0 : event.chunk.bytes;
    if (bytes == null) {
      continue;
    }
    const parsed = JSON.parse(decoder.decode(bytes));
    const delta = extractTextDeltaFromChunk(parsed);
    if (delta != null) {
      yield delta;
    }
  }
}
2461
/** Bedrock Anthropic Messages API adapter: text deltas live at `chunk.delta.text`. */
function AWSBedrockAnthropicMessagesStream(response, callbacks) {
  const extractDelta = (chunk) => chunk.delta == null ? void 0 : chunk.delta.text;
  return AWSBedrockStream(response, callbacks, extractDelta);
}
2467
/** Bedrock Anthropic (legacy completions) adapter: text lives at `chunk.completion`. */
function AWSBedrockAnthropicStream(response, callbacks) {
  const extractDelta = (chunk) => chunk.completion;
  return AWSBedrockStream(response, callbacks, extractDelta);
}
2470
/** Bedrock Cohere adapter: text lives at `chunk.text` (chunk may be nullish). */
function AWSBedrockCohereStream(response, callbacks) {
  const extractDelta = (chunk) => chunk == null ? void 0 : chunk.text;
  return AWSBedrockStream(response, callbacks, extractDelta);
}
2473
/** Bedrock Llama 2 adapter: text lives at `chunk.generation`. */
function AWSBedrockLlama2Stream(response, callbacks) {
  const extractDelta = (chunk) => chunk.generation;
  return AWSBedrockStream(response, callbacks, extractDelta);
}
2476
/**
 * Generic Bedrock adapter: converts the response's delta iterable into a
 * data-protocol ReadableStream with lifecycle callbacks.
 */
function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
  const source = readableFromAsyncIterable(
    asDeltaIterable(response, extractTextDeltaFromChunk)
  );
  return source
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(createStreamDataTransformer());
}
2481
+
2482
+ // streams/cohere-stream.ts
2483
// Shared stateful decoder for the Cohere NDJSON reader below.
var utf8Decoder = new TextDecoder("utf-8");
/**
 * Parses Cohere NDJSON lines and enqueues the `text` of every chunk that is
 * not marked `is_finished`.
 */
async function processLines(lines, controller) {
  for (const line of lines) {
    const parsed = JSON.parse(line);
    if (!parsed.is_finished) {
      controller.enqueue(parsed.text);
    }
  }
}
2492
/**
 * Reads the byte stream to completion, splitting decoded text on any line
 * ending (\r\n, \n, \r) and feeding complete lines to `processLines`. A
 * trailing partial line is carried between reads and flushed at the end;
 * the controller is closed once the input is exhausted.
 */
async function readAndProcessLines(reader, controller) {
  let pending = "";
  for (;;) {
    const { value: chunk, done } = await reader.read();
    if (done) {
      break;
    }
    pending += utf8Decoder.decode(chunk, { stream: true });
    const lines = pending.split(/\r\n|\n|\r/g);
    pending = lines.pop() || "";
    await processLines(lines, controller);
  }
  if (pending) {
    await processLines([pending], controller);
  }
  controller.close();
}
2510
/**
 * Wraps a Cohere fetch Response as a ReadableStream of text chunks. If the
 * response has no body, the stream closes immediately.
 */
function createParser2(res) {
  const reader = res.body ? res.body.getReader() : void 0;
  return new ReadableStream({
    async start(controller) {
      if (!reader) {
        controller.close();
        return;
      }
      await readAndProcessLines(reader, controller);
    }
  });
}
2523
/**
 * Adapts a Cohere SDK async iterable: yields the text of every non-empty
 * "text-generation" event; other event types are ignored.
 */
async function* streamable2(stream) {
  for await (const chunk of stream) {
    if (chunk.eventType !== "text-generation") {
      continue;
    }
    if (chunk.text) {
      yield chunk.text;
    }
  }
}
2532
/**
 * Builds a data-protocol ReadableStream from a Cohere response: either an SDK
 * async iterable or a fetch Response with an NDJSON body.
 */
function CohereStream(reader, callbacks) {
  const source = Symbol.asyncIterator in reader
    ? readableFromAsyncIterable(streamable2(reader))
    : createParser2(reader);
  return source
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(createStreamDataTransformer());
}
2539
+
2540
+ // streams/google-generative-ai-stream.ts
2541
/**
 * Adapts a Google Generative AI streaming response: yields the text of the
 * first part of the first candidate in each chunk, when it is a string.
 * Chunks without parts are skipped.
 */
async function* streamable3(response) {
  for await (const chunk of response.stream) {
    const candidate = chunk.candidates == null ? void 0 : chunk.candidates[0];
    const content = candidate == null ? void 0 : candidate.content;
    const parts = content == null ? void 0 : content.parts;
    if (parts === void 0) {
      continue;
    }
    const [firstPart] = parts;
    if (typeof firstPart.text === "string") {
      yield firstPart.text;
    }
  }
}
2554
/** Converts a Google Generative AI streaming response into a data-protocol stream. */
function GoogleGenerativeAIStream(response, cb) {
  return readableFromAsyncIterable(streamable3(response))
    .pipeThrough(createCallbacksTransformer(cb))
    .pipeThrough(createStreamDataTransformer());
}
2557
+
2558
+ // streams/huggingface-stream.ts
2559
/**
 * Wraps a Hugging Face text-generation async iterator as a ReadableStream of
 * text tokens. Skips leading whitespace, the final `generated_text` summary
 * event, and end-of-sequence sentinel tokens.
 */
function createParser3(res) {
  const trimStartOfStream = trimStartOfStreamHelper();
  const EOS_TOKENS = ["</s>", "<|endoftext|>", "<|end|>"];
  return new ReadableStream({
    async pull(controller) {
      const { value, done } = await res.next();
      if (done) {
        controller.close();
        return;
      }
      const token = value.token == null ? void 0 : value.token.text;
      const text = trimStartOfStream(token != null ? token : "");
      if (!text) {
        return;
      }
      // The last event repeats the whole generated text; don't re-emit it.
      if (value.generated_text != null && value.generated_text.length > 0) {
        return;
      }
      if (EOS_TOKENS.includes(text)) {
        return;
      }
      controller.enqueue(text);
    }
  });
}
2582
/** Converts a Hugging Face token iterator into a data-protocol stream. */
function HuggingFaceStream(res, callbacks) {
  return createParser3(res)
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(createStreamDataTransformer());
}
2585
+
2586
+ // streams/inkeep-stream.ts
2587
/**
 * Adapts an Inkeep SSE response into a data-protocol stream.
 * "message_chunk" events contribute text; "records_cited" events are captured
 * (and forwarded to `callbacks.onRecordsCited`) and delivered, together with
 * the chat session id, as metadata to `callbacks.onFinal`.
 * @throws {Error} If the response has no body.
 */
function InkeepStream(res, callbacks) {
  if (!res.body) {
    throw new Error("Response body is null");
  }
  // Closure state accumulated while parsing events; read by onFinal below.
  let chat_session_id = "";
  let records_cited;
  const inkeepEventParser = (data, options) => {
    var _a, _b;
    const { event } = options;
    if (event === "records_cited") {
      records_cited = JSON.parse(data);
      (_a = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a.call(callbacks, records_cited);
    }
    if (event === "message_chunk") {
      const inkeepMessageChunk = JSON.parse(data);
      // Keep the latest session id; chunks may omit it.
      chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
      return inkeepMessageChunk.content_chunk;
    }
    return;
  };
  // Strip onRecordsCited (handled above) and wrap onFinal so it also
  // receives the Inkeep metadata gathered during parsing.
  let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
  passThroughCallbacks = {
    ...passThroughCallbacks,
    onFinal: (completion) => {
      var _a;
      const inkeepOnFinalMetadata = {
        chat_session_id,
        records_cited
      };
      (_a = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a.call(callbacks, completion, inkeepOnFinalMetadata);
    }
  };
  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
    createStreamDataTransformer()
  );
}
2623
+
2624
+ // streams/langchain-adapter.ts
2625
// Namespace object for the LangChain adapter; __export defines lazy getters
// so it can be re-exported like a module.
var langchain_adapter_exports = {};
__export(langchain_adapter_exports, {
  toAIStream: () => toAIStream
});
2629
/**
 * Converts a LangChain message-chunk stream into a data-protocol stream.
 * A chunk's content may be a plain string or an array of typed parts; only
 * parts of type "text" are forwarded.
 */
function toAIStream(stream, callbacks) {
  const extractText = new TransformStream({
    transform: async (chunk, controller) => {
      if (typeof chunk.content === "string") {
        controller.enqueue(chunk.content);
        return;
      }
      for (const item of chunk.content) {
        if (item.type === "text") {
          controller.enqueue(item.text);
        }
      }
    }
  });
  return stream
    .pipeThrough(extractText)
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(createStreamDataTransformer());
}
2647
+
2648
+ // streams/langchain-stream.ts
2649
/**
 * Creates a data-protocol stream plus LangChain callback handlers that feed
 * it. Tokens written by `handleLLMNewToken` flow through the stream; the
 * writer is closed when the last tracked run (LLM/chain/tool) ends, or
 * aborted on the first error.
 * @returns {{ stream: ReadableStream, writer: WritableStreamDefaultWriter, handlers: object }}
 */
function LangChainStream(callbacks) {
  const stream = new TransformStream();
  const writer = stream.writable.getWriter();
  // Ids of runs currently in flight; the stream closes when this empties.
  const runs = /* @__PURE__ */ new Set();
  const handleError = async (e, runId) => {
    runs.delete(runId);
    await writer.ready;
    await writer.abort(e);
  };
  const handleStart = async (runId) => {
    runs.add(runId);
  };
  const handleEnd = async (runId) => {
    runs.delete(runId);
    if (runs.size === 0) {
      await writer.ready;
      await writer.close();
    }
  };
  return {
    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
    writer,
    handlers: {
      handleLLMNewToken: async (token) => {
        await writer.ready;
        await writer.write(token);
      },
      handleLLMStart: async (_llm, _prompts, runId) => {
        handleStart(runId);
      },
      handleLLMEnd: async (_output, runId) => {
        await handleEnd(runId);
      },
      handleLLMError: async (e, runId) => {
        await handleError(e, runId);
      },
      handleChainStart: async (_chain, _inputs, runId) => {
        handleStart(runId);
      },
      handleChainEnd: async (_outputs, runId) => {
        await handleEnd(runId);
      },
      handleChainError: async (e, runId) => {
        await handleError(e, runId);
      },
      handleToolStart: async (_tool, _input, runId) => {
        handleStart(runId);
      },
      handleToolEnd: async (_output, runId) => {
        await handleEnd(runId);
      },
      handleToolError: async (e, runId) => {
        await handleError(e, runId);
      }
    }
  };
}
2706
+
2707
+ // streams/mistral-stream.ts
2708
/**
 * Adapts a Mistral SDK chat stream: yields each chunk's first-choice delta
 * content, skipping undefined and empty strings.
 */
async function* streamable4(stream) {
  for await (const chunk of stream) {
    const choice = chunk.choices[0];
    const delta = choice == null ? void 0 : choice.delta;
    const content = delta == null ? void 0 : delta.content;
    if (content === void 0 || content === "") {
      continue;
    }
    yield content;
  }
}
2718
/** Converts a Mistral SDK chat stream into a data-protocol stream. */
function MistralStream(response, callbacks) {
  return readableFromAsyncIterable(streamable4(response))
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(createStreamDataTransformer());
}
2722
+
2723
+ // streams/openai-stream.ts
2724
/**
 * Returns an SSE data parser for OpenAI responses: JSON-parses each event and
 * converts it to text (or a function-call fragment) via chunkToText.
 */
function parseOpenAIStream() {
  const extract = chunkToText();
  return (data) => {
    const json = JSON.parse(data);
    return extract(json);
  };
}
2728
/**
 * Adapts an OpenAI SDK (or Azure OpenAI SDK) async iterable of chat chunks
 * into a text stream. Azure chunks (detected via `promptFilterResults`) use
 * camelCase fields and are remapped to the OpenAI wire shape before
 * extraction via chunkToText.
 */
async function* streamable5(stream) {
  const extract = chunkToText();
  for await (let chunk of stream) {
    if ("promptFilterResults" in chunk) {
      // Remap an Azure SDK chunk to the snake_case OpenAI chunk shape.
      chunk = {
        id: chunk.id,
        // NOTE(review): getDate() returns the day of the month, not a Unix
        // timestamp — looks suspicious; confirm the intended value. The
        // field is not read by chunkToText, so text output is unaffected.
        created: chunk.created.getDate(),
        object: chunk.object,
        // not exposed by Azure API
        model: chunk.model,
        // not exposed by Azure API
        choices: chunk.choices.map((choice) => {
          var _a, _b, _c, _d, _e, _f, _g;
          return {
            delta: {
              content: (_a = choice.delta) == null ? void 0 : _a.content,
              function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
              role: (_c = choice.delta) == null ? void 0 : _c.role,
              tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
                index,
                id: toolCall.id,
                function: toolCall.function,
                type: toolCall.type
              })) : void 0
            },
            finish_reason: choice.finishReason,
            index: choice.index
          };
        })
      };
    }
    const text = extract(chunk);
    if (text)
      yield text;
  }
}
2764
/**
 * Returns a stateful extractor that converts OpenAI chunks into either plain
 * text or `{ isText: false, content }` fragments of partially-assembled
 * function-call / tool-call JSON. The fragments, concatenated across chunks,
 * form valid JSON; `isFunctionStreamingIn` tracks whether a call's arguments
 * are currently being streamed so the closing braces are emitted exactly once.
 */
function chunkToText() {
  const trimStartOfStream = trimStartOfStreamHelper();
  let isFunctionStreamingIn;
  return (json) => {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
    if (isChatCompletionChunk(json)) {
      const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
      if ((_b = delta.function_call) == null ? void 0 : _b.name) {
        // Opening fragment of a legacy function_call.
        isFunctionStreamingIn = true;
        return {
          isText: false,
          content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
        };
      } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
        // Opening fragment of a tool_call; index > 0 closes the previous
        // call's arguments and starts the next array element.
        isFunctionStreamingIn = true;
        const toolCall = delta.tool_calls[0];
        if (toolCall.index === 0) {
          return {
            isText: false,
            content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
          };
        } else {
          return {
            isText: false,
            content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
          };
        }
      } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
        // Streaming argument text: escape it for embedding in a JSON string.
        return {
          isText: false,
          content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
        };
      } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
        return {
          isText: false,
          content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
        };
      } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
        // Close the function_call JSON.
        isFunctionStreamingIn = false;
        return {
          isText: false,
          content: '"}}'
        };
      } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
        // Close the tool_calls array and object.
        isFunctionStreamingIn = false;
        return {
          isText: false,
          content: '"}}]}'
        };
      }
    }
    // Plain text path: chat delta content or legacy completion text.
    const text = trimStartOfStream(
      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
    );
    return text;
  };
  // Escapes a raw argument fragment so it can be embedded inside the JSON
  // string literal being assembled above.
  function cleanupArguments(argumentChunk) {
    let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
    return `${escapedPartialJson}`;
  }
}
2825
// Private symbol used to thread prior function-call messages through
// recursive OpenAIStream invocations without exposing them on the public API.
var __internal__OpenAIFnMessagesSymbol = Symbol(
  "internal_openai_fn_messages"
);
2828
/**
 * Type guard: truthy when `data` looks like a chat-completion chunk, i.e. it
 * has a `choices` array whose first element carries a `delta`.
 */
function isChatCompletionChunk(data) {
  if (!("choices" in data)) {
    return false;
  }
  const { choices } = data;
  return choices && choices[0] && "delta" in choices[0];
}
2831
/**
 * Type guard: truthy when `data` looks like a legacy completion chunk, i.e.
 * it has a `choices` array whose first element carries a `text` field.
 */
function isCompletion(data) {
  if (!("choices" in data)) {
    return false;
  }
  const { choices } = data;
  return choices && choices[0] && "text" in choices[0];
}
2834
/**
 * Builds a data-protocol ReadableStream from an OpenAI response: either an
 * SDK async iterable or a fetch Response with an SSE body. When
 * experimental_onFunctionCall / experimental_onToolCall callbacks are given,
 * onFinal is suppressed in the callbacks transformer (the function-call
 * transformer fires it instead) and the stream is piped through
 * createFunctionCallTransformer.
 */
function OpenAIStream(res, callbacks) {
  const cb = callbacks;
  let stream;
  if (Symbol.asyncIterator in res) {
    stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
      createCallbacksTransformer(
        (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
          ...cb,
          onFinal: void 0
        } : {
          ...cb
        }
      )
    );
  } else {
    stream = AIStream(
      res,
      parseOpenAIStream(),
      (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
        ...cb,
        onFinal: void 0
      } : {
        ...cb
      }
    );
  }
  if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
    const functionCallTransformer = createFunctionCallTransformer(cb);
    return stream.pipeThrough(functionCallTransformer);
  } else {
    return stream.pipeThrough(createStreamDataTransformer());
  }
}
2867
/**
 * Creates a TransformStream that detects function/tool-call payloads in an
 * OpenAI text stream. Plain text passes through as "text" stream parts; a
 * stream that starts with a function_call/tool_calls JSON prefix is buffered
 * whole, parsed on flush, dispatched to experimental_onFunctionCall /
 * experimental_onToolCall, and — if the callback returns a new Response —
 * recursively piped through OpenAIStream with the accumulated messages.
 */
function createFunctionCallTransformer(callbacks) {
  const textEncoder = new TextEncoder();
  let isFirstChunk = true;
  // Buffered function/tool-call JSON (only while isFunctionStreamingIn).
  let aggregatedResponse = "";
  // Everything seen, used for the final onFinal callback.
  let aggregatedFinalCompletionResponse = "";
  let isFunctionStreamingIn = false;
  // Prior messages threaded in via the internal symbol (recursive calls).
  let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
  const decode = createChunkDecoder();
  return new TransformStream({
    async transform(chunk, controller) {
      const message = decode(chunk);
      aggregatedFinalCompletionResponse += message;
      // A function/tool call is recognized only by the very first chunk.
      const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
      if (shouldHandleAsFunction) {
        isFunctionStreamingIn = true;
        aggregatedResponse += message;
        isFirstChunk = false;
        return;
      }
      if (!isFunctionStreamingIn) {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("text", message))
        );
        return;
      } else {
        aggregatedResponse += message;
      }
    },
    async flush(controller) {
      try {
        if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
          isFunctionStreamingIn = false;
          const payload = JSON.parse(aggregatedResponse);
          let newFunctionCallMessages = [
            ...functionCallMessages
          ];
          let functionResponse = void 0;
          if (callbacks.experimental_onFunctionCall) {
            // Legacy single function_call path.
            if (payload.function_call === void 0) {
              console.warn(
                "experimental_onFunctionCall should not be defined when using tools"
              );
            }
            const argumentsPayload = JSON.parse(
              payload.function_call.arguments
            );
            functionResponse = await callbacks.experimental_onFunctionCall(
              {
                name: payload.function_call.name,
                arguments: argumentsPayload
              },
              (result) => {
                // Helper given to the callback: builds the follow-up
                // message list (assistant call + function result).
                newFunctionCallMessages = [
                  ...functionCallMessages,
                  {
                    role: "assistant",
                    content: "",
                    function_call: payload.function_call
                  },
                  {
                    role: "function",
                    name: payload.function_call.name,
                    content: JSON.stringify(result)
                  }
                ];
                return newFunctionCallMessages;
              }
            );
          }
          if (callbacks.experimental_onToolCall) {
            // Tools path: surface all parsed tool calls to the callback.
            const toolCalls = {
              tools: []
            };
            for (const tool2 of payload.tool_calls) {
              toolCalls.tools.push({
                id: tool2.id,
                type: "function",
                func: {
                  name: tool2.function.name,
                  arguments: JSON.parse(tool2.function.arguments)
                }
              });
            }
            let responseIndex = 0;
            try {
              functionResponse = await callbacks.experimental_onToolCall(
                toolCalls,
                (result) => {
                  if (result) {
                    const { tool_call_id, function_name, tool_call_result } = result;
                    newFunctionCallMessages = [
                      ...newFunctionCallMessages,
                      // Only append the assistant message if it's the first response
                      ...responseIndex === 0 ? [
                        {
                          role: "assistant",
                          content: "",
                          tool_calls: payload.tool_calls.map(
                            (tc) => ({
                              id: tc.id,
                              type: "function",
                              function: {
                                name: tc.function.name,
                                // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
                                arguments: JSON.stringify(
                                  tc.function.arguments
                                )
                              }
                            })
                          )
                        }
                      ] : [],
                      // Append the function call result message
                      {
                        role: "tool",
                        tool_call_id,
                        name: function_name,
                        content: JSON.stringify(tool_call_result)
                      }
                    ];
                    responseIndex++;
                  }
                  return newFunctionCallMessages;
                }
              );
            } catch (e) {
              console.error("Error calling experimental_onToolCall:", e);
            }
          }
          if (!functionResponse) {
            // No callback result: forward the raw call payload downstream.
            controller.enqueue(
              textEncoder.encode(
                formatStreamPart(
                  payload.function_call ? "function_call" : "tool_calls",
                  // parse to prevent double-encoding:
                  JSON.parse(aggregatedResponse)
                )
              )
            );
            return;
          } else if (typeof functionResponse === "string") {
            // Callback returned final text: emit it and finish.
            controller.enqueue(
              textEncoder.encode(formatStreamPart("text", functionResponse))
            );
            aggregatedFinalCompletionResponse = functionResponse;
            return;
          }
          // Callback returned a Response: recurse, suppressing onStart and
          // moving onFinal to this outer transformer's finally block.
          const filteredCallbacks = {
            ...callbacks,
            onStart: void 0
          };
          callbacks.onFinal = void 0;
          const openAIStream = OpenAIStream(functionResponse, {
            ...filteredCallbacks,
            [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
          });
          const reader = openAIStream.getReader();
          while (true) {
            const { done, value } = await reader.read();
            if (done) {
              break;
            }
            controller.enqueue(value);
          }
        }
      } finally {
        if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
          await callbacks.onFinal(aggregatedFinalCompletionResponse);
        }
      }
    }
  });
}
3040
+
3041
+ // streams/replicate-stream.ts
3042
/**
 * Starts streaming a Replicate prediction: fetches the prediction's SSE
 * stream URL and adapts it into a data-protocol stream.
 * @param {object} res - Replicate prediction object (`urls.stream` required).
 * @param {object} [cb] - Lifecycle callbacks.
 * @param {{ headers?: object }} [options] - Extra headers for the fetch.
 * @throws {Error} The prediction's error, or a missing-stream-URL error.
 */
async function ReplicateStream(res, cb, options) {
  const url = res.urls == null ? void 0 : res.urls.stream;
  if (!url) {
    const message = res.error ? res.error : "Missing stream URL in Replicate response";
    throw new Error(message);
  }
  const eventStream = await fetch(url, {
    method: "GET",
    headers: {
      Accept: "text/event-stream",
      ...options == null ? void 0 : options.headers
    }
  });
  return AIStream(eventStream, void 0, cb).pipeThrough(
    createStreamDataTransformer()
  );
}
3062
+
3063
+ // core/util/merge-streams.ts
3064
/**
 * Merges two ReadableStreams into one.
 *
 * While both sources are live, each pull races a read from both and emits
 * whichever chunk settles first. Once one source ends, the remaining source
 * is drained one chunk per pull, and the merged stream closes when that
 * remaining source also ends.
 *
 * In-flight reads are cached in `lastRead1` / `lastRead2` so that a read
 * which loses the race is not dropped — the next pull re-awaits the same
 * promise instead of issuing a second read.
 */
function mergeStreams(stream1, stream2) {
  const reader1 = stream1.getReader();
  const reader2 = stream2.getReader();
  // Cached in-flight read promises; undefined means no read is pending.
  let lastRead1 = void 0;
  let lastRead2 = void 0;
  let stream1Done = false;
  let stream2Done = false;
  // Drain one chunk from stream1 into the merged stream; close the merged
  // stream when stream1 ends. Only used once stream2 has finished.
  async function readStream1(controller) {
    try {
      if (lastRead1 == null) {
        lastRead1 = reader1.read();
      }
      const result = await lastRead1;
      lastRead1 = void 0;
      if (!result.done) {
        controller.enqueue(result.value);
      } else {
        controller.close();
      }
    } catch (error) {
      controller.error(error);
    }
  }
  // Mirror of readStream1 for the second source.
  async function readStream2(controller) {
    try {
      if (lastRead2 == null) {
        lastRead2 = reader2.read();
      }
      const result = await lastRead2;
      lastRead2 = void 0;
      if (!result.done) {
        controller.enqueue(result.value);
      } else {
        controller.close();
      }
    } catch (error) {
      controller.error(error);
    }
  }
  return new ReadableStream({
    async pull(controller) {
      try {
        // Once one side is exhausted, read only from the surviving side.
        if (stream1Done) {
          readStream2(controller);
          return;
        }
        if (stream2Done) {
          readStream1(controller);
          return;
        }
        // Ensure a read is pending on both sides, then race them.
        if (lastRead1 == null) {
          lastRead1 = reader1.read();
        }
        if (lastRead2 == null) {
          lastRead2 = reader2.read();
        }
        const { result, reader } = await Promise.race([
          lastRead1.then((result2) => ({ result: result2, reader: reader1 })),
          lastRead2.then((result2) => ({ result: result2, reader: reader2 }))
        ]);
        if (!result.done) {
          controller.enqueue(result.value);
        }
        // Clear the settled read. If the winning side just ended, immediately
        // pull one chunk from the other side so this pull still makes
        // progress (a done read enqueued nothing).
        if (reader === reader1) {
          lastRead1 = void 0;
          if (result.done) {
            readStream2(controller);
            stream1Done = true;
          }
        } else {
          lastRead2 = void 0;
          if (result.done) {
            stream2Done = true;
            readStream1(controller);
          }
        }
      } catch (error) {
        controller.error(error);
      }
    },
    cancel() {
      // Propagate cancellation to both sources.
      reader1.cancel();
      reader2.cancel();
    }
  });
}
3150
+
3151
+ // streams/stream-to-response.ts
3152
/**
 * Pipes an AI ReadableStream into a Node.js ServerResponse.
 *
 * Writes the status (default 200) and a `text/plain; charset=utf-8`
 * Content-Type (overridable via `init.headers`), then copies chunks from the
 * stream to the response until the stream ends.
 *
 * @param res      ReadableStream producing the body chunks.
 * @param response Node.js ServerResponse (or compatible writable target).
 * @param init     optional `{ status, headers }` for the response head.
 * @param data     optional StreamData whose stream is merged into the body.
 */
function streamToResponse(res, response, init, data) {
  var _a;
  response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
    "Content-Type": "text/plain; charset=utf-8",
    ...init == null ? void 0 : init.headers
  });
  let processedStream = res;
  if (data) {
    processedStream = mergeStreams(data.stream, res);
  }
  const reader = processedStream.getReader();
  function read() {
    reader.read().then(({ done, value }) => {
      if (done) {
        response.end();
        return;
      }
      response.write(value);
      read();
    }).catch((error) => {
      // Fix: without this handler a stream error produced an unhandled
      // promise rejection and left the HTTP response open forever. Destroy
      // the response so the client observes the failure.
      response.destroy(error);
    });
  }
  read();
}
3175
+
3176
+ // shared/parse-complex-response.ts
3177
/**
 * Returns a shallow copy of `message` with a copied `annotations` array
 * attached. When the message is missing, or there are no annotations to
 * attach (null/undefined or empty array), the original message reference is
 * returned unchanged.
 */
function assignAnnotationsToMessage(message, annotations) {
  const hasAnnotations = Boolean(annotations && annotations.length);
  if (message && hasAnnotations) {
    return { ...message, annotations: [...annotations] };
  }
  return message;
}
3182
/**
 * Consumes a data stream of typed parts (via readDataStream) and folds them
 * into chat messages, calling `update` after every part so a UI can
 * re-render incrementally.
 *
 * @param reader             ReadableStream reader for the response body.
 * @param abortControllerRef optional ref object; the read loop reports
 *                           aborted when `ref.current === null`.
 * @param update             called each iteration with (mergedMessages, data).
 * @param onFinish           optional; called once with the final prefixMap.
 * @param generateId         id factory for newly created messages
 *                           (defaults to the module-level generateId).
 * @param getCurrentDate     clock injection point; all messages created by
 *                           one call share the same `createdAt`.
 * @returns `{ messages, data }` — the surviving text / function_call /
 *          tool_calls messages plus all accumulated data parts.
 */
async function parseComplexResponse({
  reader,
  abortControllerRef,
  update,
  onFinish,
  generateId: generateId2 = generateId,
  getCurrentDate = () => /* @__PURE__ */ new Date()
}) {
  const createdAt = getCurrentDate();
  // One accumulator slot per message kind ("text", "function_call",
  // "tool_calls"); "data" collects raw data parts across the whole stream.
  const prefixMap = {
    data: []
  };
  let message_annotations = void 0;
  for await (const { type, value } of readDataStream(reader, {
    isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
  })) {
    // "text": append to (or create) the assistant text message. A new object
    // is created each time so downstream consumers see a fresh reference.
    if (type === "text") {
      if (prefixMap["text"]) {
        prefixMap["text"] = {
          ...prefixMap["text"],
          content: (prefixMap["text"].content || "") + value
        };
      } else {
        prefixMap["text"] = {
          id: generateId2(),
          role: "assistant",
          content: value,
          createdAt
        };
      }
    }
    // "tool_call": push a new tool invocation onto the text message,
    // creating an empty assistant message first if none exists yet.
    if (type === "tool_call") {
      if (prefixMap.text == null) {
        prefixMap.text = {
          id: generateId2(),
          role: "assistant",
          content: "",
          createdAt
        };
      }
      if (prefixMap.text.toolInvocations == null) {
        prefixMap.text.toolInvocations = [];
      }
      prefixMap.text.toolInvocations.push(value);
    } else if (type === "tool_result") {
      // "tool_result": replace the invocation with the matching toolCallId,
      // or append when no matching call was recorded.
      if (prefixMap.text == null) {
        prefixMap.text = {
          id: generateId2(),
          role: "assistant",
          content: "",
          createdAt
        };
      }
      if (prefixMap.text.toolInvocations == null) {
        prefixMap.text.toolInvocations = [];
      }
      const toolInvocationIndex = prefixMap.text.toolInvocations.findIndex(
        (invocation) => invocation.toolCallId === value.toolCallId
      );
      if (toolInvocationIndex !== -1) {
        prefixMap.text.toolInvocations[toolInvocationIndex] = value;
      } else {
        prefixMap.text.toolInvocations.push(value);
      }
    }
    // "function_call" / "tool_calls": each replaces its slot wholesale with a
    // fresh assistant message carrying the parsed call payload.
    let functionCallMessage = null;
    if (type === "function_call") {
      prefixMap["function_call"] = {
        id: generateId2(),
        role: "assistant",
        content: "",
        function_call: value.function_call,
        name: value.function_call.name,
        createdAt
      };
      functionCallMessage = prefixMap["function_call"];
    }
    let toolCallMessage = null;
    if (type === "tool_calls") {
      prefixMap["tool_calls"] = {
        id: generateId2(),
        role: "assistant",
        content: "",
        tool_calls: value.tool_calls,
        createdAt
      };
      toolCallMessage = prefixMap["tool_calls"];
    }
    // "data": raw data parts accumulate across the whole response.
    if (type === "data") {
      prefixMap["data"].push(...value);
    }
    let responseMessage = prefixMap["text"];
    // "message_annotations": extend the running annotation list and produce
    // annotated copies of every message kind for this update cycle.
    if (type === "message_annotations") {
      if (!message_annotations) {
        message_annotations = [...value];
      } else {
        message_annotations.push(...value);
      }
      functionCallMessage = assignAnnotationsToMessage(
        prefixMap["function_call"],
        message_annotations
      );
      toolCallMessage = assignAnnotationsToMessage(
        prefixMap["tool_calls"],
        message_annotations
      );
      responseMessage = assignAnnotationsToMessage(
        prefixMap["text"],
        message_annotations
      );
    }
    // Keep the stored messages in sync with annotations seen so far, so
    // later parts (and the final return value) carry them too.
    if (message_annotations == null ? void 0 : message_annotations.length) {
      const messagePrefixKeys = [
        "text",
        "function_call",
        "tool_calls"
      ];
      messagePrefixKeys.forEach((key) => {
        if (prefixMap[key]) {
          prefixMap[key].annotations = [...message_annotations];
        }
      });
    }
    // Emit shallow copies (annotated when annotations exist) in a fixed
    // order: function_call, tool_calls, then the text message.
    const merged = [functionCallMessage, toolCallMessage, responseMessage].filter(Boolean).map((message) => ({
      ...assignAnnotationsToMessage(message, message_annotations)
    }));
    update(merged, [...prefixMap["data"]]);
  }
  onFinish == null ? void 0 : onFinish(prefixMap);
  return {
    messages: [
      prefixMap.text,
      prefixMap.function_call,
      prefixMap.tool_calls
    ].filter(Boolean),
    data: prefixMap.data
  };
}
3320
+
3321
+ // streams/streaming-react-response.ts
3322
/**
 * Streams UI payload "rows" for React consumers as a linked list of promises.
 *
 * NOTE: the constructor returns `next` (a Promise) instead of `this`, so
 * `new experimental_StreamingReactResponse(...)` actually yields a promise
 * resolving to `{ next, ui, content }` — `next` chains to the following row
 * and is `null` on the final row.
 */
var experimental_StreamingReactResponse = class {
  constructor(res, options) {
    var _a, _b;
    // Resolver for the promise the consumer is currently awaiting; swapped
    // out each time a new row is emitted.
    let resolveFunc = () => {
    };
    let next = new Promise((resolve) => {
      resolveFunc = resolve;
    });
    // Merge the optional StreamData side channel into the body stream.
    const processedStream = (options == null ? void 0 : options.data) != null ? mergeStreams((_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream, res) : res;
    let lastPayload = void 0;
    // NOTE(review): this promise is intentionally left floating — rows are
    // delivered through the resolver chain, not by awaiting this call.
    parseComplexResponse({
      reader: processedStream.getReader(),
      update: (merged, data) => {
        var _a2, _b2, _c;
        // Content of the first merged message; empty string when no message
        // has been produced yet.
        const content = (_b2 = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b2 : "";
        // Let the caller render custom UI; fall back to the raw content.
        const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
        const payload = { ui, content };
        // Resolve the row the consumer is waiting on and arm the next one.
        const resolvePrevious = resolveFunc;
        const nextRow = new Promise((resolve) => {
          resolveFunc = resolve;
        });
        resolvePrevious({
          next: nextRow,
          ...payload
        });
        lastPayload = payload;
      },
      generateId: (_b = options == null ? void 0 : options.generateId) != null ? _b : generateId,
      onFinish: () => {
        // Terminate the chain: re-emit the last payload with next === null.
        if (lastPayload !== void 0) {
          resolveFunc({
            next: null,
            ...lastPayload
          });
        }
      }
    });
    return next;
  }
};
3362
+
3363
+ // streams/streaming-text-response.ts
3364
/**
 * A fetch Response wrapper for streaming plain-text AI output.
 *
 * The status is always forced to 200 (any `init.status` is ignored) and the
 * Content-Type defaults to `text/plain; charset=utf-8`, which `init.headers`
 * may override. When `data` is supplied, its stream is merged with the body
 * stream.
 */
var StreamingTextResponse = class extends Response {
  constructor(res, init, data) {
    const body = data ? mergeStreams(data.stream, res) : res;
    super(body, {
      ...init,
      // Streaming responses always report success.
      status: 200,
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        ...init?.headers
      }
    });
  }
};
3380
// Public module surface: stream adapters (OpenAI, Anthropic, Bedrock, …),
// core generate/stream helpers, error classes, and low-level stream
// utilities. Renamed bindings (e.g. APICallError2 as APICallError) come from
// the bundler deduplicating same-named internal declarations; `nanoid` is a
// legacy alias for generateId.
export {
  AIStream,
  APICallError2 as APICallError,
  AWSBedrockAnthropicMessagesStream,
  AWSBedrockAnthropicStream,
  AWSBedrockCohereStream,
  AWSBedrockLlama2Stream,
  AWSBedrockStream,
  AnthropicStream,
  AssistantResponse,
  CohereStream,
  EmbedManyResult,
  EmbedResult,
  EmptyResponseBodyError,
  GenerateObjectResult,
  GenerateTextResult,
  GoogleGenerativeAIStream,
  HuggingFaceStream,
  InkeepStream,
  InvalidArgumentError2 as InvalidArgumentError,
  InvalidDataContentError2 as InvalidDataContentError,
  InvalidPromptError2 as InvalidPromptError,
  InvalidResponseDataError,
  InvalidToolArgumentsError2 as InvalidToolArgumentsError,
  JSONParseError,
  langchain_adapter_exports as LangChainAdapter,
  LangChainStream,
  LoadAPIKeyError,
  MistralStream,
  NoObjectGeneratedError2 as NoObjectGeneratedError,
  NoSuchToolError3 as NoSuchToolError,
  OpenAIStream,
  ReplicateStream,
  RetryError2 as RetryError,
  StreamData,
  StreamObjectResult,
  StreamTextResult,
  StreamingTextResponse,
  ToolCallParseError,
  TypeValidationError,
  UnsupportedFunctionalityError,
  UnsupportedJSONSchemaError,
  convertDataContentToBase64String,
  convertDataContentToUint8Array,
  convertToCoreMessages,
  createCallbacksTransformer,
  createChunkDecoder,
  createEventStreamTransformer,
  createStreamDataTransformer,
  embed,
  embedMany,
  experimental_AssistantResponse,
  experimental_StreamData,
  experimental_StreamingReactResponse,
  experimental_generateObject,
  experimental_generateText,
  experimental_streamObject,
  experimental_streamText,
  formatStreamPart,
  generateId,
  generateObject,
  generateText,
  isStreamStringEqualToType,
  generateId as nanoid,
  parseStreamPart,
  readDataStream,
  readableFromAsyncIterable,
  streamObject,
  streamResponse,
  streamText,
  streamToResponse,
  tool,
  trimStartOfStreamHelper
};
3454
+ //# sourceMappingURL=index.mjs.map