ai 3.1.0-canary.4 → 3.1.1

This diff compares the contents of two publicly available package versions as they were published to their registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (69)
  1. package/dist/index.d.mts +982 -24
  2. package/dist/index.d.ts +982 -24
  3. package/dist/index.js +1748 -175
  4. package/dist/index.js.map +1 -1
  5. package/dist/index.mjs +1723 -174
  6. package/dist/index.mjs.map +1 -1
  7. package/package.json +11 -28
  8. package/prompts/dist/index.d.mts +13 -1
  9. package/prompts/dist/index.d.ts +13 -1
  10. package/prompts/dist/index.js +13 -0
  11. package/prompts/dist/index.js.map +1 -1
  12. package/prompts/dist/index.mjs +12 -0
  13. package/prompts/dist/index.mjs.map +1 -1
  14. package/react/dist/index.d.mts +23 -6
  15. package/react/dist/index.d.ts +27 -8
  16. package/react/dist/index.js +154 -141
  17. package/react/dist/index.js.map +1 -1
  18. package/react/dist/index.mjs +153 -141
  19. package/react/dist/index.mjs.map +1 -1
  20. package/react/dist/index.server.d.mts +4 -2
  21. package/react/dist/index.server.d.ts +4 -2
  22. package/react/dist/index.server.js.map +1 -1
  23. package/react/dist/index.server.mjs.map +1 -1
  24. package/rsc/dist/index.d.ts +388 -21
  25. package/rsc/dist/rsc-client.d.mts +1 -1
  26. package/rsc/dist/rsc-client.mjs +2 -0
  27. package/rsc/dist/rsc-client.mjs.map +1 -1
  28. package/rsc/dist/rsc-server.d.mts +370 -21
  29. package/rsc/dist/rsc-server.mjs +677 -36
  30. package/rsc/dist/rsc-server.mjs.map +1 -1
  31. package/rsc/dist/rsc-shared.d.mts +24 -9
  32. package/rsc/dist/rsc-shared.mjs +98 -4
  33. package/rsc/dist/rsc-shared.mjs.map +1 -1
  34. package/solid/dist/index.d.mts +7 -3
  35. package/solid/dist/index.d.ts +7 -3
  36. package/solid/dist/index.js +106 -107
  37. package/solid/dist/index.js.map +1 -1
  38. package/solid/dist/index.mjs +106 -107
  39. package/solid/dist/index.mjs.map +1 -1
  40. package/svelte/dist/index.d.mts +7 -3
  41. package/svelte/dist/index.d.ts +7 -3
  42. package/svelte/dist/index.js +109 -109
  43. package/svelte/dist/index.js.map +1 -1
  44. package/svelte/dist/index.mjs +109 -109
  45. package/svelte/dist/index.mjs.map +1 -1
  46. package/vue/dist/index.d.mts +7 -3
  47. package/vue/dist/index.d.ts +7 -3
  48. package/vue/dist/index.js +106 -107
  49. package/vue/dist/index.js.map +1 -1
  50. package/vue/dist/index.mjs +106 -107
  51. package/vue/dist/index.mjs.map +1 -1
  52. package/ai-model-specification/dist/index.d.mts +0 -665
  53. package/ai-model-specification/dist/index.d.ts +0 -665
  54. package/ai-model-specification/dist/index.js +0 -716
  55. package/ai-model-specification/dist/index.js.map +0 -1
  56. package/ai-model-specification/dist/index.mjs +0 -656
  57. package/ai-model-specification/dist/index.mjs.map +0 -1
  58. package/core/dist/index.d.mts +0 -626
  59. package/core/dist/index.d.ts +0 -626
  60. package/core/dist/index.js +0 -1918
  61. package/core/dist/index.js.map +0 -1
  62. package/core/dist/index.mjs +0 -1873
  63. package/core/dist/index.mjs.map +0 -1
  64. package/openai/dist/index.d.mts +0 -429
  65. package/openai/dist/index.d.ts +0 -429
  66. package/openai/dist/index.js +0 -1231
  67. package/openai/dist/index.js.map +0 -1
  68. package/openai/dist/index.mjs +0 -1195
  69. package/openai/dist/index.mjs.map +0 -1
package/dist/index.mjs CHANGED
@@ -1,5 +1,1578 @@
- // shared/utils.ts
+ // core/generate-object/generate-object.ts
+ import { NoObjectGeneratedError } from "@ai-sdk/provider";
+ import { safeParseJSON } from "@ai-sdk/provider-utils";
+
+ // core/generate-text/token-usage.ts
+ function calculateTokenUsage(usage) {
+   return {
+     promptTokens: usage.promptTokens,
+     completionTokens: usage.completionTokens,
+     totalTokens: usage.promptTokens + usage.completionTokens
+   };
+ }
+
+ // core/util/detect-image-mimetype.ts
+ var mimeTypeSignatures = [
+   { mimeType: "image/gif", bytes: [71, 73, 70] },
+   { mimeType: "image/png", bytes: [137, 80, 78, 71] },
+   { mimeType: "image/jpeg", bytes: [255, 216] },
+   { mimeType: "image/webp", bytes: [82, 73, 70, 70] }
+ ];
+ function detectImageMimeType(image) {
+   for (const { bytes, mimeType } of mimeTypeSignatures) {
+     if (image.length >= bytes.length && bytes.every((byte, index) => image[index] === byte)) {
+       return mimeType;
+     }
+   }
+   return void 0;
+ }
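
detectImageMimeType sniffs the magic bytes listed in mimeTypeSignatures. A minimal behavior sketch (not part of the diff; byte values taken from the table above):

    // PNG files begin with the signature bytes 137, 80, 78, 71.
    detectImageMimeType(new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10])); // "image/png"
    detectImageMimeType(new Uint8Array([0, 1, 2, 3])); // undefined (no known signature)
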
+
+ // core/prompt/data-content.ts
+ import { InvalidDataContentError } from "@ai-sdk/provider";
+ import {
+   convertBase64ToUint8Array,
+   convertUint8ArrayToBase64
+ } from "@ai-sdk/provider-utils";
+ function convertDataContentToBase64String(content) {
+   if (typeof content === "string") {
+     return content;
+   }
+   if (content instanceof ArrayBuffer) {
+     return convertUint8ArrayToBase64(new Uint8Array(content));
+   }
+   return convertUint8ArrayToBase64(content);
+ }
+ function convertDataContentToUint8Array(content) {
+   if (content instanceof Uint8Array) {
+     return content;
+   }
+   if (typeof content === "string") {
+     return convertBase64ToUint8Array(content);
+   }
+   if (content instanceof ArrayBuffer) {
+     return new Uint8Array(content);
+   }
+   throw new InvalidDataContentError({ content });
+ }
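
These helpers normalize the three accepted data-content shapes (base64 string, ArrayBuffer, Uint8Array). An illustrative sketch, not part of the diff ("SGk=" is base64 for "Hi"):

    convertDataContentToUint8Array("SGk=");                    // Uint8Array [72, 105]
    convertDataContentToUint8Array(new Uint8Array([72, 105])); // returned as-is
    convertDataContentToUint8Array(new ArrayBuffer(2));        // wrapped in a Uint8Array
    convertDataContentToUint8Array(42);                        // throws InvalidDataContentError
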
+
+ // core/prompt/convert-to-language-model-prompt.ts
+ function convertToLanguageModelPrompt(prompt) {
+   const languageModelMessages = [];
+   if (prompt.system != null) {
+     languageModelMessages.push({ role: "system", content: prompt.system });
+   }
+   switch (prompt.type) {
+     case "prompt": {
+       languageModelMessages.push({
+         role: "user",
+         content: [{ type: "text", text: prompt.prompt }]
+       });
+       break;
+     }
+     case "messages": {
+       languageModelMessages.push(
+         ...prompt.messages.map((message) => {
+           switch (message.role) {
+             case "user": {
+               if (typeof message.content === "string") {
+                 return {
+                   role: "user",
+                   content: [{ type: "text", text: message.content }]
+                 };
+               }
+               return {
+                 role: "user",
+                 content: message.content.map(
+                   (part) => {
+                     var _a;
+                     switch (part.type) {
+                       case "text": {
+                         return part;
+                       }
+                       case "image": {
+                         if (part.image instanceof URL) {
+                           return {
+                             type: "image",
+                             image: part.image,
+                             mimeType: part.mimeType
+                           };
+                         }
+                         const imageUint8 = convertDataContentToUint8Array(
+                           part.image
+                         );
+                         return {
+                           type: "image",
+                           image: imageUint8,
+                           mimeType: (_a = part.mimeType) != null ? _a : detectImageMimeType(imageUint8)
+                         };
+                       }
+                     }
+                   }
+                 )
+               };
+             }
+             case "assistant": {
+               if (typeof message.content === "string") {
+                 return {
+                   role: "assistant",
+                   content: [{ type: "text", text: message.content }]
+                 };
+               }
+               return { role: "assistant", content: message.content };
+             }
+             case "tool": {
+               return message;
+             }
+           }
+         })
+       );
+       break;
+     }
+     default: {
+       const _exhaustiveCheck = prompt;
+       throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
+     }
+   }
+   return languageModelMessages;
+ }
+
+ // core/prompt/get-validated-prompt.ts
+ import { InvalidPromptError } from "@ai-sdk/provider";
+ function getValidatedPrompt(prompt) {
+   if (prompt.prompt == null && prompt.messages == null) {
+     throw new InvalidPromptError({
+       prompt,
+       message: "prompt or messages must be defined"
+     });
+   }
+   if (prompt.prompt != null && prompt.messages != null) {
+     throw new InvalidPromptError({
+       prompt,
+       message: "prompt and messages cannot be defined at the same time"
+     });
+   }
+   return prompt.prompt != null ? {
+     type: "prompt",
+     prompt: prompt.prompt,
+     messages: void 0,
+     system: prompt.system
+   } : {
+     type: "messages",
+     prompt: void 0,
+     messages: prompt.messages,
+     // only possible case bc of checks above
+     system: prompt.system
+   };
+ }
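
getValidatedPrompt enforces that exactly one of prompt and messages is provided. A sketch of the behavior (not part of the diff):

    getValidatedPrompt({ prompt: "Hi" });
    // => { type: "prompt", prompt: "Hi", messages: undefined, system: undefined }
    getValidatedPrompt({ prompt: "Hi", messages: [] }); // throws InvalidPromptError
    getValidatedPrompt({});                             // throws InvalidPromptError
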
+
+ // core/prompt/prepare-call-settings.ts
+ import { InvalidArgumentError } from "@ai-sdk/provider";
+ function prepareCallSettings({
+   maxTokens,
+   temperature,
+   topP,
+   presencePenalty,
+   frequencyPenalty,
+   seed,
+   maxRetries
+ }) {
+   if (maxTokens != null) {
+     if (!Number.isInteger(maxTokens)) {
+       throw new InvalidArgumentError({
+         parameter: "maxTokens",
+         value: maxTokens,
+         message: "maxTokens must be an integer"
+       });
+     }
+     if (maxTokens < 1) {
+       throw new InvalidArgumentError({
+         parameter: "maxTokens",
+         value: maxTokens,
+         message: "maxTokens must be >= 1"
+       });
+     }
+   }
+   if (temperature != null) {
+     if (typeof temperature !== "number") {
+       throw new InvalidArgumentError({
+         parameter: "temperature",
+         value: temperature,
+         message: "temperature must be a number"
+       });
+     }
+   }
+   if (topP != null) {
+     if (typeof topP !== "number") {
+       throw new InvalidArgumentError({
+         parameter: "topP",
+         value: topP,
+         message: "topP must be a number"
+       });
+     }
+   }
+   if (presencePenalty != null) {
+     if (typeof presencePenalty !== "number") {
+       throw new InvalidArgumentError({
+         parameter: "presencePenalty",
+         value: presencePenalty,
+         message: "presencePenalty must be a number"
+       });
+     }
+   }
+   if (frequencyPenalty != null) {
+     if (typeof frequencyPenalty !== "number") {
+       throw new InvalidArgumentError({
+         parameter: "frequencyPenalty",
+         value: frequencyPenalty,
+         message: "frequencyPenalty must be a number"
+       });
+     }
+   }
+   if (seed != null) {
+     if (!Number.isInteger(seed)) {
+       throw new InvalidArgumentError({
+         parameter: "seed",
+         value: seed,
+         message: "seed must be an integer"
+       });
+     }
+   }
+   if (maxRetries != null) {
+     if (!Number.isInteger(maxRetries)) {
+       throw new InvalidArgumentError({
+         parameter: "maxRetries",
+         value: maxRetries,
+         message: "maxRetries must be an integer"
+       });
+     }
+     if (maxRetries < 0) {
+       throw new InvalidArgumentError({
+         parameter: "maxRetries",
+         value: maxRetries,
+         message: "maxRetries must be >= 0"
+       });
+     }
+   }
+   return {
+     maxTokens,
+     temperature: temperature != null ? temperature : 0,
+     topP,
+     presencePenalty,
+     frequencyPenalty,
+     seed,
+     maxRetries: maxRetries != null ? maxRetries : 2
+   };
+ }
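
Note the defaults applied in the returned object: temperature falls back to 0 and maxRetries to 2 when unset. Sketch (not part of the diff):

    prepareCallSettings({});               // { ..., temperature: 0, maxRetries: 2 }
    prepareCallSettings({ maxTokens: 0 }); // throws InvalidArgumentError: maxTokens must be >= 1
    prepareCallSettings({ seed: 1.5 });    // throws InvalidArgumentError: seed must be an integer
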
+
+ // core/util/convert-zod-to-json-schema.ts
+ import zodToJsonSchema from "zod-to-json-schema";
+ function convertZodToJSONSchema(zodSchema) {
+   return zodToJsonSchema(zodSchema);
+ }
+
+ // core/util/retry-with-exponential-backoff.ts
+ import { APICallError, RetryError } from "@ai-sdk/provider";
+ import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
+
+ // core/util/delay.ts
+ async function delay(delayInMs) {
+   return new Promise((resolve) => setTimeout(resolve, delayInMs));
+ }
+
+ // core/util/retry-with-exponential-backoff.ts
+ var retryWithExponentialBackoff = ({
+   maxRetries = 2,
+   initialDelayInMs = 2e3,
+   backoffFactor = 2
+ } = {}) => async (f) => _retryWithExponentialBackoff(f, {
+   maxRetries,
+   delayInMs: initialDelayInMs,
+   backoffFactor
+ });
+ async function _retryWithExponentialBackoff(f, {
+   maxRetries,
+   delayInMs,
+   backoffFactor
+ }, errors = []) {
+   try {
+     return await f();
+   } catch (error) {
+     if (isAbortError(error)) {
+       throw error;
+     }
+     if (maxRetries === 0) {
+       throw error;
+     }
+     const errorMessage = getErrorMessage(error);
+     const newErrors = [...errors, error];
+     const tryNumber = newErrors.length;
+     if (tryNumber > maxRetries) {
+       throw new RetryError({
+         message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
+         reason: "maxRetriesExceeded",
+         errors: newErrors
+       });
+     }
+     if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
+       await delay(delayInMs);
+       return _retryWithExponentialBackoff(
+         f,
+         { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
+         newErrors
+       );
+     }
+     if (tryNumber === 1) {
+       throw error;
+     }
+     throw new RetryError({
+       message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
+       reason: "errorNotRetryable",
+       errors: newErrors
+     });
+   }
+ }
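
With the defaults above (maxRetries 2, initial delay 2000 ms, backoff factor 2), a retryable APICallError is retried after 2 s and again after 4 s before a RetryError is thrown. A usage sketch, where fetchCompletion is a hypothetical placeholder:

    const retry = retryWithExponentialBackoff({ maxRetries: 2 });
    const response = await retry(() => fetchCompletion());
    // attempt 1 -> wait 2000 ms -> attempt 2 -> wait 4000 ms -> attempt 3 -> RetryError
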
+
+ // core/generate-object/inject-json-schema-into-system.ts
+ var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+ var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
+ function injectJsonSchemaIntoSystem({
+   system,
+   schema,
+   schemaPrefix = DEFAULT_SCHEMA_PREFIX,
+   schemaSuffix = DEFAULT_SCHEMA_SUFFIX
+ }) {
+   return [
+     system,
+     system != null ? "" : null,
+     // add a newline if system is not null
+     schemaPrefix,
+     JSON.stringify(schema),
+     schemaSuffix
+   ].filter((line) => line != null).join("\n");
+ }
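
For a non-null system string, the assembled system prompt looks like this (sketch, not part of the diff):

    injectJsonSchemaIntoSystem({ system: "You are helpful.", schema: { type: "object" } });
    // "You are helpful.\n\nJSON schema:\n{\"type\":\"object\"}\nYou MUST answer with a JSON object that matches the JSON schema above."
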
+
+ // core/generate-object/generate-object.ts
+ async function generateObject({
+   model,
+   schema,
+   mode,
+   system,
+   prompt,
+   messages,
+   maxRetries,
+   abortSignal,
+   ...settings
+ }) {
+   var _a, _b;
+   const retry = retryWithExponentialBackoff({ maxRetries });
+   const jsonSchema = convertZodToJSONSchema(schema);
+   if (mode === "auto" || mode == null) {
+     mode = model.defaultObjectGenerationMode;
+   }
+   let result;
+   let finishReason;
+   let usage;
+   let warnings;
+   let rawResponse;
+   let logprobs;
+   switch (mode) {
+     case "json": {
+       const validatedPrompt = getValidatedPrompt({
+         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
+         prompt,
+         messages
+       });
+       const generateResult = await retry(() => {
+         return model.doGenerate({
+           mode: { type: "object-json" },
+           ...prepareCallSettings(settings),
+           inputFormat: validatedPrompt.type,
+           prompt: convertToLanguageModelPrompt(validatedPrompt),
+           abortSignal
+         });
+       });
+       if (generateResult.text === void 0) {
+         throw new NoObjectGeneratedError();
+       }
+       result = generateResult.text;
+       finishReason = generateResult.finishReason;
+       usage = generateResult.usage;
+       warnings = generateResult.warnings;
+       rawResponse = generateResult.rawResponse;
+       logprobs = generateResult.logprobs;
+       break;
+     }
+     case "grammar": {
+       const validatedPrompt = getValidatedPrompt({
+         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
+         prompt,
+         messages
+       });
+       const generateResult = await retry(
+         () => model.doGenerate({
+           mode: { type: "object-grammar", schema: jsonSchema },
+           ...settings,
+           inputFormat: validatedPrompt.type,
+           prompt: convertToLanguageModelPrompt(validatedPrompt),
+           abortSignal
+         })
+       );
+       if (generateResult.text === void 0) {
+         throw new NoObjectGeneratedError();
+       }
+       result = generateResult.text;
+       finishReason = generateResult.finishReason;
+       usage = generateResult.usage;
+       warnings = generateResult.warnings;
+       rawResponse = generateResult.rawResponse;
+       logprobs = generateResult.logprobs;
+       break;
+     }
+     case "tool": {
+       const validatedPrompt = getValidatedPrompt({
+         system,
+         prompt,
+         messages
+       });
+       const generateResult = await retry(
+         () => model.doGenerate({
+           mode: {
+             type: "object-tool",
+             tool: {
+               type: "function",
+               name: "json",
+               description: "Respond with a JSON object.",
+               parameters: jsonSchema
+             }
+           },
+           ...settings,
+           inputFormat: validatedPrompt.type,
+           prompt: convertToLanguageModelPrompt(validatedPrompt),
+           abortSignal
+         })
+       );
+       const functionArgs = (_b = (_a = generateResult.toolCalls) == null ? void 0 : _a[0]) == null ? void 0 : _b.args;
+       if (functionArgs === void 0) {
+         throw new NoObjectGeneratedError();
+       }
+       result = functionArgs;
+       finishReason = generateResult.finishReason;
+       usage = generateResult.usage;
+       warnings = generateResult.warnings;
+       rawResponse = generateResult.rawResponse;
+       logprobs = generateResult.logprobs;
+       break;
+     }
+     case void 0: {
+       throw new Error("Model does not have a default object generation mode.");
+     }
+     default: {
+       const _exhaustiveCheck = mode;
+       throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
+     }
+   }
+   const parseResult = safeParseJSON({ text: result, schema });
+   if (!parseResult.success) {
+     throw parseResult.error;
+   }
+   return new GenerateObjectResult({
+     object: parseResult.value,
+     finishReason,
+     usage: calculateTokenUsage(usage),
+     warnings,
+     rawResponse,
+     logprobs
+   });
+ }
+ var GenerateObjectResult = class {
+   constructor(options) {
+     this.object = options.object;
+     this.finishReason = options.finishReason;
+     this.usage = options.usage;
+     this.warnings = options.warnings;
+     this.rawResponse = options.rawResponse;
+     this.logprobs = options.logprobs;
+   }
+ };
+ var experimental_generateObject = generateObject;
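
A minimal calling sketch for generateObject, assuming model is a provider language model instance and z comes from zod (both placeholders, not part of this diff):

    import { z } from "zod";

    const { object, usage } = await generateObject({
      model, // placeholder: any language model with an object generation mode
      schema: z.object({ name: z.string(), age: z.number() }),
      prompt: "Generate a fictional person."
    });
    // object has been parsed and validated against the schema before it is returned.
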
+
+ // core/util/async-iterable-stream.ts
+ function createAsyncIterableStream(source, transformer) {
+   const transformedStream = source.pipeThrough(
+     new TransformStream(transformer)
+   );
+   transformedStream[Symbol.asyncIterator] = () => {
+     const reader = transformedStream.getReader();
+     return {
+       async next() {
+         const { done, value } = await reader.read();
+         return done ? { done: true, value: void 0 } : { done: false, value };
+       }
+     };
+   };
+   return transformedStream;
+ }
+
+ // core/util/is-deep-equal-data.ts
+ function isDeepEqualData(obj1, obj2) {
+   if (obj1 === obj2)
+     return true;
+   if (obj1 == null || obj2 == null)
+     return false;
+   if (typeof obj1 !== "object" && typeof obj2 !== "object")
+     return obj1 === obj2;
+   if (obj1.constructor !== obj2.constructor)
+     return false;
+   if (obj1 instanceof Date && obj2 instanceof Date) {
+     return obj1.getTime() === obj2.getTime();
+   }
+   if (Array.isArray(obj1)) {
+     if (obj1.length !== obj2.length)
+       return false;
+     for (let i = 0; i < obj1.length; i++) {
+       if (!isDeepEqualData(obj1[i], obj2[i]))
+         return false;
+     }
+     return true;
+   }
+   const keys1 = Object.keys(obj1);
+   const keys2 = Object.keys(obj2);
+   if (keys1.length !== keys2.length)
+     return false;
+   for (const key of keys1) {
+     if (!keys2.includes(key))
+       return false;
+     if (!isDeepEqualData(obj1[key], obj2[key]))
+       return false;
+   }
+   return true;
+ }
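
isDeepEqualData is what lets partialObjectStream (further below) emit only when the parsed object actually changed. Sketch (not part of the diff):

    isDeepEqualData({ a: [1, 2] }, { a: [1, 2] }); // true
    isDeepEqualData({ a: [1, 2] }, { a: [1] });    // false
    isDeepEqualData(new Date(0), new Date(0));     // true (compared by timestamp)
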
+
+ // core/util/parse-partial-json.ts
+ import SecureJSON from "secure-json-parse";
+
+ // core/util/fix-json.ts
+ function fixJson(input) {
+   const stack = ["ROOT"];
+   let lastValidIndex = -1;
+   let literalStart = null;
+   function processValueStart(char, i, swapState) {
+     {
+       switch (char) {
+         case '"': {
+           lastValidIndex = i;
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_STRING");
+           break;
+         }
+         case "f":
+         case "t":
+         case "n": {
+           lastValidIndex = i;
+           literalStart = i;
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_LITERAL");
+           break;
+         }
+         case "-": {
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_NUMBER");
+           break;
+         }
+         case "0":
+         case "1":
+         case "2":
+         case "3":
+         case "4":
+         case "5":
+         case "6":
+         case "7":
+         case "8":
+         case "9": {
+           lastValidIndex = i;
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_NUMBER");
+           break;
+         }
+         case "{": {
+           lastValidIndex = i;
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_OBJECT_START");
+           break;
+         }
+         case "[": {
+           lastValidIndex = i;
+           stack.pop();
+           stack.push(swapState);
+           stack.push("INSIDE_ARRAY_START");
+           break;
+         }
+       }
+     }
+   }
+   function processAfterObjectValue(char, i) {
+     switch (char) {
+       case ",": {
+         stack.pop();
+         stack.push("INSIDE_OBJECT_AFTER_COMMA");
+         break;
+       }
+       case "}": {
+         lastValidIndex = i;
+         stack.pop();
+         break;
+       }
+     }
+   }
+   function processAfterArrayValue(char, i) {
+     switch (char) {
+       case ",": {
+         stack.pop();
+         stack.push("INSIDE_ARRAY_AFTER_COMMA");
+         break;
+       }
+       case "]": {
+         lastValidIndex = i;
+         stack.pop();
+         break;
+       }
+     }
+   }
+   for (let i = 0; i < input.length; i++) {
+     const char = input[i];
+     const currentState = stack[stack.length - 1];
+     switch (currentState) {
+       case "ROOT":
+         processValueStart(char, i, "FINISH");
+         break;
+       case "INSIDE_OBJECT_START": {
+         switch (char) {
+           case '"': {
+             stack.pop();
+             stack.push("INSIDE_OBJECT_KEY");
+             break;
+           }
+           case "}": {
+             stack.pop();
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_OBJECT_AFTER_COMMA": {
+         switch (char) {
+           case '"': {
+             stack.pop();
+             stack.push("INSIDE_OBJECT_KEY");
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_OBJECT_KEY": {
+         switch (char) {
+           case '"': {
+             stack.pop();
+             stack.push("INSIDE_OBJECT_AFTER_KEY");
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_OBJECT_AFTER_KEY": {
+         switch (char) {
+           case ":": {
+             stack.pop();
+             stack.push("INSIDE_OBJECT_BEFORE_VALUE");
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_OBJECT_BEFORE_VALUE": {
+         processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
+         break;
+       }
+       case "INSIDE_OBJECT_AFTER_VALUE": {
+         processAfterObjectValue(char, i);
+         break;
+       }
+       case "INSIDE_STRING": {
+         switch (char) {
+           case '"': {
+             stack.pop();
+             lastValidIndex = i;
+             break;
+           }
+           case "\\": {
+             stack.push("INSIDE_STRING_ESCAPE");
+             break;
+           }
+           default: {
+             lastValidIndex = i;
+           }
+         }
+         break;
+       }
+       case "INSIDE_ARRAY_START": {
+         switch (char) {
+           case "]": {
+             lastValidIndex = i;
+             stack.pop();
+             break;
+           }
+           default: {
+             lastValidIndex = i;
+             processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_ARRAY_AFTER_VALUE": {
+         switch (char) {
+           case ",": {
+             stack.pop();
+             stack.push("INSIDE_ARRAY_AFTER_COMMA");
+             break;
+           }
+           case "]": {
+             lastValidIndex = i;
+             stack.pop();
+             break;
+           }
+           default: {
+             lastValidIndex = i;
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_ARRAY_AFTER_COMMA": {
+         processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+         break;
+       }
+       case "INSIDE_STRING_ESCAPE": {
+         stack.pop();
+         lastValidIndex = i;
+         break;
+       }
+       case "INSIDE_NUMBER": {
+         switch (char) {
+           case "0":
+           case "1":
+           case "2":
+           case "3":
+           case "4":
+           case "5":
+           case "6":
+           case "7":
+           case "8":
+           case "9": {
+             lastValidIndex = i;
+             break;
+           }
+           case "e":
+           case "E":
+           case "-":
+           case ".": {
+             break;
+           }
+           case ",": {
+             stack.pop();
+             if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+               processAfterArrayValue(char, i);
+             }
+             if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+               processAfterObjectValue(char, i);
+             }
+             break;
+           }
+           case "}": {
+             stack.pop();
+             if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+               processAfterObjectValue(char, i);
+             }
+             break;
+           }
+           case "]": {
+             stack.pop();
+             if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+               processAfterArrayValue(char, i);
+             }
+             break;
+           }
+           default: {
+             stack.pop();
+             break;
+           }
+         }
+         break;
+       }
+       case "INSIDE_LITERAL": {
+         const partialLiteral = input.substring(literalStart, i + 1);
+         if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
+           stack.pop();
+           if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+             processAfterObjectValue(char, i);
+           } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+             processAfterArrayValue(char, i);
+           }
+         } else {
+           lastValidIndex = i;
+         }
+         break;
+       }
+     }
+   }
+   let result = input.slice(0, lastValidIndex + 1);
+   for (let i = stack.length - 1; i >= 0; i--) {
+     const state = stack[i];
+     switch (state) {
+       case "INSIDE_STRING": {
+         result += '"';
+         break;
+       }
+       case "INSIDE_OBJECT_KEY":
+       case "INSIDE_OBJECT_AFTER_KEY":
+       case "INSIDE_OBJECT_AFTER_COMMA":
+       case "INSIDE_OBJECT_START":
+       case "INSIDE_OBJECT_BEFORE_VALUE":
+       case "INSIDE_OBJECT_AFTER_VALUE": {
+         result += "}";
+         break;
+       }
+       case "INSIDE_ARRAY_START":
+       case "INSIDE_ARRAY_AFTER_COMMA":
+       case "INSIDE_ARRAY_AFTER_VALUE": {
+         result += "]";
+         break;
+       }
+       case "INSIDE_LITERAL": {
+         const partialLiteral = input.substring(literalStart, input.length);
+         if ("true".startsWith(partialLiteral)) {
+           result += "true".slice(partialLiteral.length);
+         } else if ("false".startsWith(partialLiteral)) {
+           result += "false".slice(partialLiteral.length);
+         } else if ("null".startsWith(partialLiteral)) {
+           result += "null".slice(partialLiteral.length);
+         }
+       }
+     }
+   }
+   return result;
+ }
+
+ // core/util/parse-partial-json.ts
+ function parsePartialJson(jsonText) {
+   if (jsonText == null) {
+     return void 0;
+   }
+   try {
+     return SecureJSON.parse(jsonText);
+   } catch (ignored) {
+     try {
+       const fixedJsonText = fixJson(jsonText);
+       return SecureJSON.parse(fixedJsonText);
+     } catch (ignored2) {
+     }
+   }
+   return void 0;
+ }
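
fixJson trims the input back to the last structurally valid index and then closes whatever is still open on the state stack, which is what makes mid-stream JSON parseable. Sketch (not part of the diff):

    fixJson('{"name": "Al');          // '{"name": "Al"}'
    fixJson('[1, 2, {"a": tr');       // '[1, 2, {"a": true}]'
    parsePartialJson('{"name": "Al'); // { name: "Al" }
    parsePartialJson("not json");     // undefined
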
+
+ // core/generate-object/stream-object.ts
+ async function streamObject({
+   model,
+   schema,
+   mode,
+   system,
+   prompt,
+   messages,
+   maxRetries,
+   abortSignal,
+   ...settings
+ }) {
+   const retry = retryWithExponentialBackoff({ maxRetries });
+   const jsonSchema = convertZodToJSONSchema(schema);
+   if (mode === "auto" || mode == null) {
+     mode = model.defaultObjectGenerationMode;
+   }
+   let callOptions;
+   let transformer;
+   switch (mode) {
+     case "json": {
+       const validatedPrompt = getValidatedPrompt({
+         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
+         prompt,
+         messages
+       });
+       callOptions = {
+         mode: { type: "object-json" },
+         ...prepareCallSettings(settings),
+         inputFormat: validatedPrompt.type,
+         prompt: convertToLanguageModelPrompt(validatedPrompt),
+         abortSignal
+       };
+       transformer = {
+         transform: (chunk, controller) => {
+           switch (chunk.type) {
+             case "text-delta":
+               controller.enqueue(chunk.textDelta);
+               break;
+             case "finish":
+             case "error":
+               controller.enqueue(chunk);
+               break;
+           }
+         }
+       };
+       break;
+     }
+     case "grammar": {
+       const validatedPrompt = getValidatedPrompt({
+         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
+         prompt,
+         messages
+       });
+       callOptions = {
+         mode: { type: "object-grammar", schema: jsonSchema },
+         ...settings,
+         inputFormat: validatedPrompt.type,
+         prompt: convertToLanguageModelPrompt(validatedPrompt),
+         abortSignal
+       };
+       transformer = {
+         transform: (chunk, controller) => {
+           switch (chunk.type) {
+             case "text-delta":
+               controller.enqueue(chunk.textDelta);
+               break;
+             case "finish":
+             case "error":
+               controller.enqueue(chunk);
+               break;
+           }
+         }
+       };
+       break;
+     }
+     case "tool": {
+       const validatedPrompt = getValidatedPrompt({
+         system,
+         prompt,
+         messages
+       });
+       callOptions = {
+         mode: {
+           type: "object-tool",
+           tool: {
+             type: "function",
+             name: "json",
+             description: "Respond with a JSON object.",
+             parameters: jsonSchema
+           }
+         },
+         ...settings,
+         inputFormat: validatedPrompt.type,
+         prompt: convertToLanguageModelPrompt(validatedPrompt),
+         abortSignal
+       };
+       transformer = {
+         transform(chunk, controller) {
+           switch (chunk.type) {
+             case "tool-call-delta":
+               controller.enqueue(chunk.argsTextDelta);
+               break;
+             case "finish":
+             case "error":
+               controller.enqueue(chunk);
+               break;
+           }
+         }
+       };
+       break;
+     }
+     case void 0: {
+       throw new Error("Model does not have a default object generation mode.");
+     }
+     default: {
+       const _exhaustiveCheck = mode;
+       throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
+     }
+   }
+   const result = await retry(() => model.doStream(callOptions));
+   return new StreamObjectResult({
+     stream: result.stream.pipeThrough(new TransformStream(transformer)),
+     warnings: result.warnings,
+     rawResponse: result.rawResponse
+   });
+ }
+ var StreamObjectResult = class {
+   constructor({
+     stream,
+     warnings,
+     rawResponse
+   }) {
+     this.originalStream = stream;
+     this.warnings = warnings;
+     this.rawResponse = rawResponse;
+   }
+   get partialObjectStream() {
+     let accumulatedText = "";
+     let latestObject = void 0;
+     return createAsyncIterableStream(this.originalStream, {
+       transform(chunk, controller) {
+         if (typeof chunk === "string") {
+           accumulatedText += chunk;
+           const currentObject = parsePartialJson(
+             accumulatedText
+           );
+           if (!isDeepEqualData(latestObject, currentObject)) {
+             latestObject = currentObject;
+             controller.enqueue(currentObject);
+           }
+         } else if (chunk.type === "error") {
+           throw chunk.error;
+         }
+       }
+     });
+   }
+   get fullStream() {
+     let accumulatedText = "";
+     let latestObject = void 0;
+     return createAsyncIterableStream(this.originalStream, {
+       transform(chunk, controller) {
+         if (typeof chunk === "string") {
+           accumulatedText += chunk;
+           const currentObject = parsePartialJson(
+             accumulatedText
+           );
+           if (!isDeepEqualData(latestObject, currentObject)) {
+             latestObject = currentObject;
+             controller.enqueue({ type: "object", object: currentObject });
+           }
+         } else {
+           switch (chunk.type) {
+             case "finish":
+               controller.enqueue({
+                 ...chunk,
+                 usage: calculateTokenUsage(chunk.usage)
+               });
+               break;
+             default:
+               controller.enqueue(chunk);
+               break;
+           }
+         }
+       }
+     });
+   }
+ };
+ var experimental_streamObject = streamObject;
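
partialObjectStream ties together parsePartialJson and isDeepEqualData to yield progressively more complete objects. A minimal sketch (model and z are placeholders as above):

    const result = await streamObject({
      model,
      schema: z.object({ title: z.string() }),
      prompt: "Invent a book title."
    });
    for await (const partialObject of result.partialObjectStream) {
      console.log(partialObject); // e.g. {}, { title: "The" }, { title: "The Quiet Diff" }, ...
    }
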
+
+ // core/generate-text/tool-call.ts
+ import {
+   InvalidToolArgumentsError,
+   NoSuchToolError
+ } from "@ai-sdk/provider";
+ import { safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
+ function parseToolCall({
+   toolCall,
+   tools
+ }) {
+   const toolName = toolCall.toolName;
+   if (tools == null) {
+     throw new NoSuchToolError({ toolName: toolCall.toolName });
+   }
+   const tool2 = tools[toolName];
+   if (tool2 == null) {
+     throw new NoSuchToolError({
+       toolName: toolCall.toolName,
+       availableTools: Object.keys(tools)
+     });
+   }
+   const parseResult = safeParseJSON2({
+     text: toolCall.args,
+     schema: tool2.parameters
+   });
+   if (parseResult.success === false) {
+     throw new InvalidToolArgumentsError({
+       toolName,
+       toolArgs: toolCall.args,
+       cause: parseResult.error
+     });
+   }
+   return {
+     type: "tool-call",
+     toolCallId: toolCall.toolCallId,
+     toolName,
+     args: parseResult.value
+   };
+ }
+
+ // core/generate-text/generate-text.ts
+ async function generateText({
+   model,
+   tools,
+   system,
+   prompt,
+   messages,
+   maxRetries,
+   abortSignal,
+   ...settings
+ }) {
+   var _a, _b;
+   const retry = retryWithExponentialBackoff({ maxRetries });
+   const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
+   const modelResponse = await retry(() => {
+     return model.doGenerate({
+       mode: {
+         type: "regular",
+         tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
+           type: "function",
+           name,
+           description: tool2.description,
+           parameters: convertZodToJSONSchema(tool2.parameters)
+         }))
+       },
+       ...prepareCallSettings(settings),
+       inputFormat: validatedPrompt.type,
+       prompt: convertToLanguageModelPrompt(validatedPrompt),
+       abortSignal
+     });
+   });
+   const toolCalls = [];
+   for (const modelToolCall of (_a = modelResponse.toolCalls) != null ? _a : []) {
+     toolCalls.push(parseToolCall({ toolCall: modelToolCall, tools }));
+   }
+   const toolResults = tools == null ? [] : await executeTools({ toolCalls, tools });
+   return new GenerateTextResult({
+     // Always return a string so that the caller doesn't have to check for undefined.
+     // If they need to check if the model did not return any text,
+     // they can check the length of the string:
+     text: (_b = modelResponse.text) != null ? _b : "",
+     toolCalls,
+     toolResults,
+     finishReason: modelResponse.finishReason,
+     usage: calculateTokenUsage(modelResponse.usage),
+     warnings: modelResponse.warnings,
+     rawResponse: modelResponse.rawResponse,
+     logprobs: modelResponse.logprobs
+   });
+ }
+ async function executeTools({
+   toolCalls,
+   tools
+ }) {
+   const toolResults = await Promise.all(
+     toolCalls.map(async (toolCall) => {
+       const tool2 = tools[toolCall.toolName];
+       if ((tool2 == null ? void 0 : tool2.execute) == null) {
+         return void 0;
+       }
+       const result = await tool2.execute(toolCall.args);
+       return {
+         toolCallId: toolCall.toolCallId,
+         toolName: toolCall.toolName,
+         args: toolCall.args,
+         result
+       };
+     })
+   );
+   return toolResults.filter(
+     (result) => result != null
+   );
+ }
+ var GenerateTextResult = class {
+   constructor(options) {
+     this.text = options.text;
+     this.toolCalls = options.toolCalls;
+     this.toolResults = options.toolResults;
+     this.finishReason = options.finishReason;
+     this.usage = options.usage;
+     this.warnings = options.warnings;
+     this.rawResponse = options.rawResponse;
+     this.logprobs = options.logprobs;
+   }
+ };
+ var experimental_generateText = generateText;
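
A tool-calling sketch for generateText, using the tool helper defined later in this file (model and z are placeholders; the weather tool is hypothetical):

    const { text, toolCalls, toolResults } = await generateText({
      model,
      tools: {
        weather: tool({
          description: "Get the weather for a city.",
          parameters: z.object({ city: z.string() }),
          execute: async ({ city }) => ({ city, tempC: 21 }) // stub implementation
        })
      },
      prompt: "What is the weather in Berlin?"
    });
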
+
+ // core/generate-text/run-tools-transformation.ts
+ import { NoSuchToolError as NoSuchToolError2 } from "@ai-sdk/provider";
+
+ // shared/generate-id.ts
  import { customAlphabet } from "nanoid/non-secure";
+ var generateId = customAlphabet(
+   "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+   7
+ );
+
+ // core/generate-text/run-tools-transformation.ts
+ function runToolsTransformation({
+   tools,
+   generatorStream
+ }) {
+   let canClose = false;
+   const outstandingToolCalls = /* @__PURE__ */ new Set();
+   let toolResultsStreamController = null;
+   const toolResultsStream = new ReadableStream({
+     start(controller) {
+       toolResultsStreamController = controller;
+     }
+   });
+   const forwardStream = new TransformStream({
+     transform(chunk, controller) {
+       const chunkType = chunk.type;
+       switch (chunkType) {
+         case "text-delta":
+         case "error": {
+           controller.enqueue(chunk);
+           break;
+         }
+         case "tool-call": {
+           const toolName = chunk.toolName;
+           if (tools == null) {
+             toolResultsStreamController.enqueue({
+               type: "error",
+               error: new NoSuchToolError2({ toolName: chunk.toolName })
+             });
+             break;
+           }
+           const tool2 = tools[toolName];
+           if (tool2 == null) {
+             toolResultsStreamController.enqueue({
+               type: "error",
+               error: new NoSuchToolError2({
+                 toolName: chunk.toolName,
+                 availableTools: Object.keys(tools)
+               })
+             });
+             break;
+           }
+           try {
+             const toolCall = parseToolCall({
+               toolCall: chunk,
+               tools
+             });
+             controller.enqueue(toolCall);
+             if (tool2.execute != null) {
+               const toolExecutionId = generateId();
+               outstandingToolCalls.add(toolExecutionId);
+               tool2.execute(toolCall.args).then(
+                 (result) => {
+                   toolResultsStreamController.enqueue({
+                     ...toolCall,
+                     type: "tool-result",
+                     result
+                   });
+                   outstandingToolCalls.delete(toolExecutionId);
+                   if (canClose && outstandingToolCalls.size === 0) {
+                     toolResultsStreamController.close();
+                   }
+                 },
+                 (error) => {
+                   toolResultsStreamController.enqueue({
+                     type: "error",
+                     error
+                   });
+                   outstandingToolCalls.delete(toolExecutionId);
+                   if (canClose && outstandingToolCalls.size === 0) {
+                     toolResultsStreamController.close();
+                   }
+                 }
+               );
+             }
+           } catch (error) {
+             toolResultsStreamController.enqueue({
+               type: "error",
+               error
+             });
+           }
+           break;
+         }
+         case "finish": {
+           controller.enqueue({
+             type: "finish",
+             finishReason: chunk.finishReason,
+             logprobs: chunk.logprobs,
+             usage: calculateTokenUsage(chunk.usage)
+           });
+           break;
+         }
+         case "tool-call-delta": {
+           break;
+         }
+         default: {
+           const _exhaustiveCheck = chunkType;
+           throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
+         }
+       }
+     },
+     flush() {
+       canClose = true;
+       if (outstandingToolCalls.size === 0) {
+         toolResultsStreamController.close();
+       }
+     }
+   });
+   return new ReadableStream({
+     async start(controller) {
+       return Promise.all([
+         generatorStream.pipeThrough(forwardStream).pipeTo(
+           new WritableStream({
+             write(chunk) {
+               controller.enqueue(chunk);
+             },
+             close() {
+             }
+           })
+         ),
+         toolResultsStream.pipeTo(
+           new WritableStream({
+             write(chunk) {
+               controller.enqueue(chunk);
+             },
+             close() {
+               controller.close();
+             }
+           })
+         )
+       ]);
+     }
+   });
+ }
+
+ // core/generate-text/stream-text.ts
+ async function streamText({
+   model,
+   tools,
+   system,
+   prompt,
+   messages,
+   maxRetries,
+   abortSignal,
+   ...settings
+ }) {
+   const retry = retryWithExponentialBackoff({ maxRetries });
+   const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
+   const { stream, warnings, rawResponse } = await retry(
+     () => model.doStream({
+       mode: {
+         type: "regular",
+         tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
+           type: "function",
+           name,
+           description: tool2.description,
+           parameters: convertZodToJSONSchema(tool2.parameters)
+         }))
+       },
+       ...prepareCallSettings(settings),
+       inputFormat: validatedPrompt.type,
+       prompt: convertToLanguageModelPrompt(validatedPrompt),
+       abortSignal
+     })
+   );
+   return new StreamTextResult({
+     stream: runToolsTransformation({
+       tools,
+       generatorStream: stream
+     }),
+     warnings,
+     rawResponse
+   });
+ }
+ var StreamTextResult = class {
+   constructor({
+     stream,
+     warnings,
+     rawResponse
+   }) {
+     this.originalStream = stream;
+     this.warnings = warnings;
+     this.rawResponse = rawResponse;
+   }
+   /**
+   A text stream that returns only the generated text deltas. You can use it
+   as either an AsyncIterable or a ReadableStream. When an error occurs, the
+   stream will throw the error.
+   */
+   get textStream() {
+     return createAsyncIterableStream(this.originalStream, {
+       transform(chunk, controller) {
+         if (chunk.type === "text-delta") {
+           if (chunk.textDelta.length > 0) {
+             controller.enqueue(chunk.textDelta);
+           }
+         } else if (chunk.type === "error") {
+           throw chunk.error;
+         }
+       }
+     });
+   }
+   /**
+   A stream with all events, including text deltas, tool calls, tool results, and
+   errors.
+   You can use it as either an AsyncIterable or a ReadableStream. When an error occurs, the
+   stream will throw the error.
+   */
+   get fullStream() {
+     return createAsyncIterableStream(this.originalStream, {
+       transform(chunk, controller) {
+         if (chunk.type === "text-delta") {
+           if (chunk.textDelta.length > 0) {
+             controller.enqueue(chunk);
+           }
+         } else {
+           controller.enqueue(chunk);
+         }
+       }
+     });
+   }
+   /**
+   Converts the result to an `AIStream` object that is compatible with `StreamingTextResponse`.
+   It can be used with the `useChat` and `useCompletion` hooks.
+
+   @param callbacks
+   Stream callbacks that will be called when the stream emits events.
+
+   @returns an `AIStream` object.
+   */
+   toAIStream(callbacks) {
+     return this.textStream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+   }
+   /**
+   Writes stream data output to a Node.js response-like object.
+   It sets a `Content-Type` header to `text/plain; charset=utf-8` and
+   writes each stream data part as a separate chunk.
+
+   @param response A Node.js response-like object (ServerResponse).
+   @param init Optional headers and status code.
+   */
+   pipeAIStreamToResponse(response, init) {
+     var _a;
+     response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
+       "Content-Type": "text/plain; charset=utf-8",
+       ...init == null ? void 0 : init.headers
+     });
+     const reader = this.textStream.pipeThrough(createCallbacksTransformer(void 0)).pipeThrough(createStreamDataTransformer()).getReader();
+     const read = async () => {
+       try {
+         while (true) {
+           const { done, value } = await reader.read();
+           if (done)
+             break;
+           response.write(value);
+         }
+       } catch (error) {
+         throw error;
+       } finally {
+         response.end();
+       }
+     };
+     read();
+   }
+   /**
+   Writes text delta output to a Node.js response-like object.
+   It sets a `Content-Type` header to `text/plain; charset=utf-8` and
+   writes each text delta as a separate chunk.
+
+   @param response A Node.js response-like object (ServerResponse).
+   @param init Optional headers and status code.
+   */
+   pipeTextStreamToResponse(response, init) {
+     var _a;
+     response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
+       "Content-Type": "text/plain; charset=utf-8",
+       ...init == null ? void 0 : init.headers
+     });
+     const reader = this.textStream.getReader();
+     const read = async () => {
+       const encoder = new TextEncoder();
+       try {
+         while (true) {
+           const { done, value } = await reader.read();
+           if (done)
+             break;
+           response.write(encoder.encode(value));
+         }
+       } catch (error) {
+         throw error;
+       } finally {
+         response.end();
+       }
+     };
+     read();
+   }
+   /**
+   Converts the result to a streamed response object with a stream data part stream.
+   It can be used with the `useChat` and `useCompletion` hooks.
+
+   @param init Optional headers.
+
+   @return A response object.
+   */
+   toAIStreamResponse(init) {
+     return new StreamingTextResponse(this.toAIStream(), init);
+   }
+   /**
+   Creates a simple text stream response.
+   Each text delta is encoded as UTF-8 and sent as a separate chunk.
+   Non-text-delta events are ignored.
+
+   @param init Optional headers and status code.
+   */
+   toTextStreamResponse(init) {
+     var _a;
+     const encoder = new TextEncoder();
+     return new Response(
+       this.textStream.pipeThrough(
+         new TransformStream({
+           transform(chunk, controller) {
+             controller.enqueue(encoder.encode(chunk));
+           }
+         })
+       ),
+       {
+         status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
+         headers: {
+           "Content-Type": "text/plain; charset=utf-8",
+           ...init == null ? void 0 : init.headers
+         }
+       }
+     );
+   }
+ };
+ var experimental_streamText = streamText;
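
A minimal consumption sketch for streamText (model is a placeholder; toAIStreamResponse suits route handlers that return a Response):

    const result = await streamText({ model, prompt: "Write a haiku about package diffs." });
    for await (const textDelta of result.textStream) {
      process.stdout.write(textDelta);
    }
    // In a web route handler, one could instead: return result.toAIStreamResponse();
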
1551
+
1552
+ // core/tool/tool.ts
1553
+ function tool(tool2) {
1554
+ return tool2;
1555
+ }
1556
+
1557
+ // core/types/errors.ts
1558
+ import {
1559
+ APICallError as APICallError2,
1560
+ EmptyResponseBodyError,
1561
+ InvalidArgumentError as InvalidArgumentError2,
1562
+ InvalidDataContentError as InvalidDataContentError2,
1563
+ InvalidPromptError as InvalidPromptError2,
1564
+ InvalidResponseDataError,
1565
+ InvalidToolArgumentsError as InvalidToolArgumentsError2,
1566
+ JSONParseError,
1567
+ LoadAPIKeyError,
1568
+ NoObjectGeneratedError as NoObjectGeneratedError2,
1569
+ NoSuchToolError as NoSuchToolError3,
1570
+ RetryError as RetryError2,
1571
+ ToolCallParseError,
1572
+ TypeValidationError,
1573
+ UnsupportedFunctionalityError,
1574
+ UnsupportedJSONSchemaError
1575
+ } from "@ai-sdk/provider";
3
1576
 
4
1577
  // shared/stream-parts.ts
5
1578
  var textStreamPart = {
@@ -101,9 +1674,9 @@ var toolCallStreamPart = {
101
1674
  code: "7",
102
1675
  name: "tool_calls",
103
1676
  parse: (value) => {
104
- if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some((tc) => {
105
- tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string";
106
- })) {
1677
+ if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some(
1678
+ (tc) => tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string"
1679
+ )) {
107
1680
  throw new Error(
108
1681
  '"tool_calls" parts expect an object with a ToolCallPayload.'
109
1682
  );
@@ -181,11 +1754,50 @@ function formatStreamPart(type, value) {
181
1754
  `;
182
1755
  }
183
1756
 
1757
+ // shared/read-data-stream.ts
1758
+ var NEWLINE = "\n".charCodeAt(0);
1759
+ function concatChunks(chunks, totalLength) {
1760
+ const concatenatedChunks = new Uint8Array(totalLength);
1761
+ let offset = 0;
1762
+ for (const chunk of chunks) {
1763
+ concatenatedChunks.set(chunk, offset);
1764
+ offset += chunk.length;
1765
+ }
1766
+ chunks.length = 0;
1767
+ return concatenatedChunks;
1768
+ }
1769
+ async function* readDataStream(reader, {
1770
+ isAborted
1771
+ } = {}) {
1772
+ const decoder = new TextDecoder();
1773
+ const chunks = [];
1774
+ let totalLength = 0;
1775
+ while (true) {
1776
+ const { value } = await reader.read();
1777
+ if (value) {
1778
+ chunks.push(value);
1779
+ totalLength += value.length;
1780
+ if (value[value.length - 1] !== NEWLINE) {
1781
+ continue;
1782
+ }
1783
+ }
1784
+ if (chunks.length === 0) {
1785
+ break;
1786
+ }
1787
+ const concatenatedChunks = concatChunks(chunks, totalLength);
1788
+ totalLength = 0;
1789
+ const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
1790
+ for (const streamPart of streamParts2) {
1791
+ yield streamPart;
1792
+ }
1793
+ if (isAborted == null ? void 0 : isAborted()) {
1794
+ reader.cancel();
1795
+ break;
1796
+ }
1797
+ }
1798
+ }
1799
+
184
1800
  // shared/utils.ts
185
- var nanoid = customAlphabet(
186
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
187
- 7
188
- );
189
1801
  function createChunkDecoder(complex) {
190
1802
  const decoder = new TextDecoder();
191
1803
  if (!complex) {
@@ -201,7 +1813,6 @@ function createChunkDecoder(complex) {
201
1813
  };
202
1814
  }
203
1815
  var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
204
- var COMPLEX_HEADER = "X-Experimental-Stream-Data";
205
1816
 
206
1817
  // streams/ai-stream.ts
207
1818
  import {
@@ -328,7 +1939,7 @@ function readableFromAsyncIterable(iterable) {
328
1939
  }
329
1940
 
330
1941
  // streams/stream-data.ts
331
- var experimental_StreamData = class {
1942
+ var StreamData = class {
332
1943
  constructor() {
333
1944
  this.encoder = new TextEncoder();
334
1945
  this.controller = null;
@@ -414,14 +2025,7 @@ var experimental_StreamData = class {
414
2025
  this.messageAnnotations.push(value);
415
2026
  }
416
2027
  };
417
- function createStreamDataTransformer(experimental_streamData) {
418
- if (!experimental_streamData) {
419
- return new TransformStream({
420
- transform: async (chunk, controller) => {
421
- controller.enqueue(chunk);
422
- }
423
- });
424
- }
2028
+ function createStreamDataTransformer() {
425
2029
  const encoder = new TextEncoder();
426
2030
  const decoder = new TextDecoder();
427
2031
  return new TransformStream({
@@ -431,6 +2035,8 @@ function createStreamDataTransformer(experimental_streamData) {
431
2035
  }
432
2036
  });
433
2037
  }
2038
+ var experimental_StreamData = class extends StreamData {
2039
+ };
434
2040
 
435
2041
  // streams/anthropic-stream.ts
436
2042
  function parseAnthropicStream() {
@@ -470,16 +2076,16 @@ async function* streamable(stream) {
470
2076
  }
471
2077
  function AnthropicStream(res, cb) {
472
2078
  if (Symbol.asyncIterator in res) {
473
- return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
2079
+ return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
474
2080
  } else {
475
2081
  return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
476
- createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
2082
+ createStreamDataTransformer()
477
2083
  );
478
2084
  }
479
2085
  }
480
2086
 
481
2087
  // streams/assistant-response.ts
482
- function experimental_AssistantResponse({ threadId, messageId }, process2) {
2088
+ function AssistantResponse({ threadId, messageId }, process2) {
483
2089
  const stream = new ReadableStream({
484
2090
  async start(controller) {
485
2091
  var _a;
@@ -570,6 +2176,7 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
570
2176
  }
571
2177
  });
572
2178
  }
2179
+ var experimental_AssistantResponse = AssistantResponse;
573
2180
 
574
2181
  // streams/aws-bedrock-stream.ts
575
2182
  async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
@@ -587,20 +2194,17 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
587
2194
  }
588
2195
  }
589
2196
  }
2197
+ function AWSBedrockAnthropicMessagesStream(response, callbacks) {
2198
+ return AWSBedrockStream(response, callbacks, (chunk) => {
2199
+ var _a;
2200
+ return (_a = chunk.delta) == null ? void 0 : _a.text;
2201
+ });
2202
+ }
590
2203
  function AWSBedrockAnthropicStream(response, callbacks) {
591
2204
  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
592
2205
  }
593
2206
  function AWSBedrockCohereStream(response, callbacks) {
594
- return AWSBedrockStream(
595
- response,
596
- callbacks,
597
- // As of 2023-11-17, Bedrock does not support streaming for Cohere,
598
- // so we take the full generation:
599
- (chunk) => {
600
- var _a, _b;
601
- return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
602
- }
603
- );
2207
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk == null ? void 0 : chunk.text);
604
2208
  }
605
2209
  function AWSBedrockLlama2Stream(response, callbacks) {
606
2210
  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
@@ -608,9 +2212,7 @@ function AWSBedrockLlama2Stream(response, callbacks) {
608
2212
  function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
609
2213
  return readableFromAsyncIterable(
610
2214
  asDeltaIterable(response, extractTextDeltaFromChunk)
611
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
612
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
613
- );
2215
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
614
2216
  }
615
2217
 
616
2218
  // streams/cohere-stream.ts
@@ -665,13 +2267,9 @@ async function* streamable2(stream) {
665
2267
  }
666
2268
  function CohereStream(reader, callbacks) {
667
2269
  if (Symbol.asyncIterator in reader) {
668
- return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
669
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
670
- );
2270
+ return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
671
2271
  } else {
672
- return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
673
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
674
- );
2272
+ return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
675
2273
  }
676
2274
  }
677
2275
 
@@ -690,7 +2288,7 @@ async function* streamable3(response) {
690
2288
  }
691
2289
  }
692
2290
  function GoogleGenerativeAIStream(response, cb) {
693
- return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
2291
+ return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
694
2292
  }
695
2293
 
696
2294
  // streams/huggingface-stream.ts
@@ -718,9 +2316,7 @@ function createParser3(res) {
   });
 }
 function HuggingFaceStream(res, callbacks) {
-  return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
+  return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/inkeep-stream.ts
@@ -757,7 +2353,7 @@ function InkeepStream(res, callbacks) {
     }
   };
   return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
-    createStreamDataTransformer(passThroughCallbacks == null ? void 0 : passThroughCallbacks.experimental_streamData)
+    createStreamDataTransformer()
   );
 }
 
@@ -782,9 +2378,7 @@ function LangChainStream(callbacks) {
     }
   };
   return {
-    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    ),
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
     writer,
     handlers: {
       handleLLMNewToken: async (token) => {
@@ -835,9 +2429,7 @@ async function* streamable4(stream) {
 }
 function MistralStream(response, callbacks) {
   const stream = readableFromAsyncIterable(streamable4(response));
-  return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
+  return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/openai-stream.ts
@@ -981,9 +2573,7 @@ function OpenAIStream(res, callbacks) {
     const functionCallTransformer = createFunctionCallTransformer(cb);
     return stream.pipeThrough(functionCallTransformer);
   } else {
-    return stream.pipeThrough(
-      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
-    );
+    return stream.pipeThrough(createStreamDataTransformer());
   }
 }
 function createFunctionCallTransformer(callbacks) {
@@ -993,7 +2583,6 @@ function createFunctionCallTransformer(callbacks) {
   let aggregatedFinalCompletionResponse = "";
   let isFunctionStreamingIn = false;
   let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
-  const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
   const decode = createChunkDecoder();
   return new TransformStream({
     async transform(chunk, controller) {
@@ -1008,7 +2597,7 @@ function createFunctionCallTransformer(callbacks) {
       }
       if (!isFunctionStreamingIn) {
         controller.enqueue(
-          isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+          textEncoder.encode(formatStreamPart("text", message))
         );
         return;
       } else {
@@ -1060,13 +2649,13 @@ function createFunctionCallTransformer(callbacks) {
           const toolCalls = {
             tools: []
           };
-          for (const tool of payload.tool_calls) {
+          for (const tool2 of payload.tool_calls) {
             toolCalls.tools.push({
-              id: tool.id,
+              id: tool2.id,
               type: "function",
               func: {
-                name: tool.function.name,
-                arguments: JSON.parse(tool.function.arguments)
+                name: tool2.function.name,
+                arguments: JSON.parse(tool2.function.arguments)
               }
             });
           }
@@ -1119,17 +2708,17 @@ function createFunctionCallTransformer(callbacks) {
         if (!functionResponse) {
           controller.enqueue(
             textEncoder.encode(
-              isComplexMode ? formatStreamPart(
+              formatStreamPart(
                 payload.function_call ? "function_call" : "tool_calls",
                 // parse to prevent double-encoding:
                 JSON.parse(aggregatedResponse)
-              ) : aggregatedResponse
+              )
             )
           );
           return;
         } else if (typeof functionResponse === "string") {
           controller.enqueue(
-            isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
+            textEncoder.encode(formatStreamPart("text", functionResponse))
           );
           aggregatedFinalCompletionResponse = functionResponse;
           return;
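With `isComplexMode` removed, the function-call transformer unconditionally wraps its output in `formatStreamPart`, so consumers always receive typed, newline-terminated parts rather than raw text. Assuming the documented prefixes (`0:` for text parts, an assumption from the published data-stream protocol rather than this file), the encoding looks like:

    import { formatStreamPart } from "ai";

    formatStreamPart("text", "Hello"); // -> '0:"Hello"\n' (assumed prefix)
    // function_call / tool_calls payloads are JSON.parse'd first to avoid
    // double-encoding, then emitted the same way under their own prefixes.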
@@ -1179,53 +2768,10 @@ async function ReplicateStream(res, cb, options) {
     }
   });
   return AIStream(eventStream, void 0, cb).pipeThrough(
-    createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    createStreamDataTransformer()
   );
 }
 
-// shared/read-data-stream.ts
-var NEWLINE = "\n".charCodeAt(0);
-function concatChunks(chunks, totalLength) {
-  const concatenatedChunks = new Uint8Array(totalLength);
-  let offset = 0;
-  for (const chunk of chunks) {
-    concatenatedChunks.set(chunk, offset);
-    offset += chunk.length;
-  }
-  chunks.length = 0;
-  return concatenatedChunks;
-}
-async function* readDataStream(reader, {
-  isAborted
-} = {}) {
-  const decoder = new TextDecoder();
-  const chunks = [];
-  let totalLength = 0;
-  while (true) {
-    const { value } = await reader.read();
-    if (value) {
-      chunks.push(value);
-      totalLength += value.length;
-      if (value[value.length - 1] !== NEWLINE) {
-        continue;
-      }
-    }
-    if (chunks.length === 0) {
-      break;
-    }
-    const concatenatedChunks = concatChunks(chunks, totalLength);
-    totalLength = 0;
-    const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
-    for (const streamPart of streamParts2) {
-      yield streamPart;
-    }
-    if (isAborted == null ? void 0 : isAborted()) {
-      reader.cancel();
-      break;
-    }
-  }
-}
-
 // shared/parse-complex-response.ts
 function assignAnnotationsToMessage(message, annotations) {
   if (!message || !annotations || !annotations.length)
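`readDataStream` and its `concatChunks` buffer helper are no longer inlined in this bundle, but `readDataStream` itself stays on the public surface (see the export list below), so the consumer-side contract is unchanged: given a `ReadableStreamDefaultReader<Uint8Array>`, it buffers to newline boundaries and yields parsed stream parts. A minimal consumption sketch (`printParts` is hypothetical):

    import { readDataStream } from "ai";

    async function printParts(response) {
      const reader = response.body.getReader();
      for await (const part of readDataStream(reader)) {
        // Each part is a parsed stream part, e.g. type "text" or "data".
        console.log(part.type, part.value);
      }
    }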
@@ -1237,7 +2783,7 @@ async function parseComplexResponse({
   abortControllerRef,
   update,
   onFinish,
-  generateId = nanoid,
+  generateId: generateId2 = generateId,
   getCurrentDate = () => /* @__PURE__ */ new Date()
 }) {
   const createdAt = getCurrentDate();
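The renamed binding (`generateId: generateId2 = generateId`) keeps `generateId` as the public option name while binding it locally as `generateId2`, so the default can reference the module-level `generateId` helper (which replaces `nanoid`) without shadowing it. The destructuring pattern in isolation (`demo` is hypothetical):

    import { generateId } from "ai";

    function demo({ generateId: generateId2 = generateId } = {}) {
      return generateId2(); // caller-supplied generator, else the module helper
    }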
@@ -1256,7 +2802,7 @@ async function parseComplexResponse({
       };
     } else {
       prefixMap["text"] = {
-        id: generateId(),
+        id: generateId2(),
         role: "assistant",
         content: value,
         createdAt
@@ -1266,7 +2812,7 @@ async function parseComplexResponse({
     let functionCallMessage = null;
     if (type === "function_call") {
       prefixMap["function_call"] = {
-        id: generateId(),
+        id: generateId2(),
         role: "assistant",
         content: "",
         function_call: value.function_call,
@@ -1278,7 +2824,7 @@ async function parseComplexResponse({
     let toolCallMessage = null;
     if (type === "tool_calls") {
       prefixMap["tool_calls"] = {
-        id: generateId(),
+        id: generateId2(),
         role: "assistant",
         content: "",
         tool_calls: value.tool_calls,
@@ -1340,74 +2886,41 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
-    var _a;
+    var _a, _b;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
       resolveFunc = resolve;
     });
-    if (options == null ? void 0 : options.data) {
-      const processedStream = res.pipeThrough(
-        options.data.stream
-      );
-      let lastPayload = void 0;
-      parseComplexResponse({
-        reader: processedStream.getReader(),
-        update: (merged, data) => {
-          var _a2, _b, _c;
-          const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
-          const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
-          const payload = { ui, content: content2 };
-          const resolvePrevious = resolveFunc;
-          const nextRow = new Promise((resolve) => {
-            resolveFunc = resolve;
-          });
-          resolvePrevious({
-            next: nextRow,
-            ...payload
+    const processedStream = (options == null ? void 0 : options.data) != null ? res.pipeThrough((_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream) : res;
+    let lastPayload = void 0;
+    parseComplexResponse({
+      reader: processedStream.getReader(),
+      update: (merged, data) => {
+        var _a2, _b2, _c;
+        const content = (_b2 = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b2 : "";
+        const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
+        const payload = { ui, content };
+        const resolvePrevious = resolveFunc;
+        const nextRow = new Promise((resolve) => {
+          resolveFunc = resolve;
+        });
+        resolvePrevious({
+          next: nextRow,
+          ...payload
+        });
+        lastPayload = payload;
+      },
+      generateId: (_b = options == null ? void 0 : options.generateId) != null ? _b : generateId,
+      onFinish: () => {
+        if (lastPayload !== void 0) {
+          resolveFunc({
+            next: null,
+            ...lastPayload
           });
-        lastPayload = payload;
-      },
-      generateId: (_a = options.generateId) != null ? _a : nanoid,
-      onFinish: () => {
-        if (lastPayload !== void 0) {
-          resolveFunc({
-            next: null,
-            ...lastPayload
-          });
-        }
         }
-      });
-      return next;
-    }
-    let content = "";
-    const decode = createChunkDecoder();
-    const reader = res.getReader();
-    async function readChunk() {
-      var _a2;
-      const { done, value } = await reader.read();
-      if (!done) {
-        content += decode(value);
       }
-      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
-      const payload = {
-        ui,
-        content
-      };
-      const resolvePrevious = resolveFunc;
-      const nextRow = done ? null : new Promise((resolve) => {
-        resolveFunc = resolve;
-      });
-      resolvePrevious({
-        next: nextRow,
-        ...payload
-      });
-      if (done) {
-        return;
-      }
-      await readChunk();
-    }
-    readChunk();
+    });
     return next;
   }
 };
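`experimental_StreamingReactResponse` now funnels every response through `parseComplexResponse`: when `options.data` is present the stream is piped through it first, otherwise the raw stream is read directly, which replaces the removed plain-text `readChunk` loop. Construction is unchanged for callers; a sketch (the `ui` callback shape matches the `options.ui` call above, and `stream` stands in for a data stream produced elsewhere):

    import { experimental_StreamingReactResponse } from "ai";

    const response = new experimental_StreamingReactResponse(stream, {
      ui: ({ content, data }) => content, // map parsed content to React UI
    });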
@@ -1424,7 +2937,6 @@ var StreamingTextResponse = class extends Response {
       status: 200,
       headers: {
         "Content-Type": "text/plain; charset=utf-8",
-        [COMPLEX_HEADER]: data ? "true" : "false",
         ...init == null ? void 0 : init.headers
       }
     });
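Because the wire format no longer switches between plain text and stream parts, `StreamingTextResponse` stops emitting the mode-signaling `COMPLEX_HEADER` entry (which also disappears from the export list below); the headers reduce to:

    {
      "Content-Type": "text/plain; charset=utf-8",
      ...init?.headers,
    }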
@@ -1450,21 +2962,45 @@ function streamToResponse(res, response, init) {
 }
 export {
   AIStream,
+  APICallError2 as APICallError,
+  AWSBedrockAnthropicMessagesStream,
   AWSBedrockAnthropicStream,
   AWSBedrockCohereStream,
   AWSBedrockLlama2Stream,
   AWSBedrockStream,
   AnthropicStream,
-  COMPLEX_HEADER,
+  AssistantResponse,
   CohereStream,
+  EmptyResponseBodyError,
+  GenerateObjectResult,
+  GenerateTextResult,
   GoogleGenerativeAIStream,
   HuggingFaceStream,
   InkeepStream,
+  InvalidArgumentError2 as InvalidArgumentError,
+  InvalidDataContentError2 as InvalidDataContentError,
+  InvalidPromptError2 as InvalidPromptError,
+  InvalidResponseDataError,
+  InvalidToolArgumentsError2 as InvalidToolArgumentsError,
+  JSONParseError,
   LangChainStream,
+  LoadAPIKeyError,
   MistralStream,
+  NoObjectGeneratedError2 as NoObjectGeneratedError,
+  NoSuchToolError3 as NoSuchToolError,
   OpenAIStream,
   ReplicateStream,
+  RetryError2 as RetryError,
+  StreamData,
+  StreamObjectResult,
+  StreamTextResult,
   StreamingTextResponse,
+  ToolCallParseError,
+  TypeValidationError,
+  UnsupportedFunctionalityError,
+  UnsupportedJSONSchemaError,
+  convertDataContentToBase64String,
+  convertDataContentToUint8Array,
   createCallbacksTransformer,
   createChunkDecoder,
   createEventStreamTransformer,
@@ -1472,10 +3008,23 @@ export {
   experimental_AssistantResponse,
   experimental_StreamData,
   experimental_StreamingReactResponse,
+  experimental_generateObject,
+  experimental_generateText,
+  experimental_streamObject,
+  experimental_streamText,
+  formatStreamPart,
+  generateId,
+  generateObject,
+  generateText,
   isStreamStringEqualToType,
-  nanoid,
+  generateId as nanoid,
+  parseStreamPart,
+  readDataStream,
   readableFromAsyncIterable,
+  streamObject,
+  streamText,
   streamToResponse,
+  tool,
   trimStartOfStreamHelper
 };
 //# sourceMappingURL=index.mjs.map
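The widened export list folds the former `ai/core` entry points into the main bundle: generation functions (`generateText`, `streamText`, `generateObject`, `streamObject`, plus `experimental_`-prefixed aliases), the `tool` helper, typed error classes, and the stream-part utilities (`formatStreamPart`, `parseStreamPart`, `readDataStream`). `nanoid` survives only as an alias for `generateId`. A minimal sketch of the newly exported core API; the provider import and model id are assumptions, not part of this file:

    import { streamText } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumed provider package

    const result = await streamText({
      model: openai("gpt-3.5-turbo"), // illustrative model id
      prompt: "Summarize this changelog in one sentence.",
    });

    // StreamTextResult exposes an async-iterable stream of text deltas.
    for await (const delta of result.textStream) {
      process.stdout.write(delta);
    }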