ai 5.0.0-canary.4 → 5.0.0-canary.6

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (37)
  1. package/CHANGELOG.md +18 -0
  2. package/dist/index.d.mts +861 -145
  3. package/dist/index.d.ts +861 -145
  4. package/dist/index.js +1653 -166
  5. package/dist/index.js.map +1 -1
  6. package/dist/index.mjs +1568 -119
  7. package/dist/index.mjs.map +1 -1
  8. package/dist/internal/index.d.mts +205 -15
  9. package/dist/internal/index.d.ts +205 -15
  10. package/dist/internal/index.js +63 -9
  11. package/dist/internal/index.js.map +1 -1
  12. package/dist/internal/index.mjs +46 -2
  13. package/dist/internal/index.mjs.map +1 -1
  14. package/dist/mcp-stdio/index.js.map +1 -0
  15. package/dist/mcp-stdio/index.mjs.map +1 -0
  16. package/dist/test/index.js.map +1 -0
  17. package/dist/test/index.mjs.map +1 -0
  18. package/package.json +18 -19
  19. package/mcp-stdio/create-child-process.test.ts +0 -92
  20. package/mcp-stdio/create-child-process.ts +0 -21
  21. package/mcp-stdio/dist/index.js.map +0 -1
  22. package/mcp-stdio/dist/index.mjs.map +0 -1
  23. package/mcp-stdio/get-environment.test.ts +0 -13
  24. package/mcp-stdio/get-environment.ts +0 -43
  25. package/mcp-stdio/index.ts +0 -4
  26. package/mcp-stdio/mcp-stdio-transport.test.ts +0 -262
  27. package/mcp-stdio/mcp-stdio-transport.ts +0 -157
  28. package/test/dist/index.js.map +0 -1
  29. package/test/dist/index.mjs.map +0 -1
  30. package/{mcp-stdio/dist → dist/mcp-stdio}/index.d.mts +6 -6
  31. package/{mcp-stdio/dist → dist/mcp-stdio}/index.d.ts +6 -6
  32. package/{mcp-stdio/dist → dist/mcp-stdio}/index.js +0 -0
  33. package/{mcp-stdio/dist → dist/mcp-stdio}/index.mjs +0 -0
  34. package/{test/dist → dist/test}/index.d.mts +0 -0
  35. package/{test/dist → dist/test}/index.d.ts +0 -0
  36. package/{test/dist → dist/test}/index.js +0 -0
  37. package/{test/dist → dist/test}/index.mjs +0 -0
package/dist/index.js CHANGED
@@ -1,7 +1,9 @@
  "use strict";
+ var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
  for (var name17 in all)
@@ -15,11 +17,19 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

- // streams/index.ts
- var streams_exports = {};
- __export(streams_exports, {
+ // index.ts
+ var ai_exports = {};
+ __export(ai_exports, {
  AISDKError: () => import_provider20.AISDKError,
  APICallError: () => import_provider20.APICallError,
  DownloadError: () => DownloadError,
@@ -53,6 +63,9 @@ __export(streams_exports, {
  UnsupportedFunctionalityError: () => import_provider20.UnsupportedFunctionalityError,
  appendClientMessage: () => appendClientMessage,
  appendResponseMessages: () => appendResponseMessages,
+ asSchema: () => asSchema,
+ callChatApi: () => callChatApi,
+ callCompletionApi: () => callCompletionApi,
  convertToCoreMessages: () => convertToCoreMessages,
  coreAssistantMessageSchema: () => coreAssistantMessageSchema,
  coreMessageSchema: () => coreMessageSchema,
@@ -62,7 +75,7 @@ __export(streams_exports, {
  cosineSimilarity: () => cosineSimilarity,
  createDataStream: () => createDataStream,
  createDataStreamResponse: () => createDataStreamResponse,
- createIdGenerator: () => import_provider_utils16.createIdGenerator,
+ createIdGenerator: () => import_provider_utils21.createIdGenerator,
  createProviderRegistry: () => createProviderRegistry,
  customProvider: () => customProvider,
  defaultSettingsMiddleware: () => defaultSettingsMiddleware,
@@ -74,33 +87,1513 @@ __export(streams_exports, {
  experimental_generateImage: () => generateImage,
  experimental_transcribe: () => transcribe,
  experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
+ extractMaxToolInvocationStep: () => extractMaxToolInvocationStep,
  extractReasoningMiddleware: () => extractReasoningMiddleware,
- formatDataStreamPart: () => import_ui_utils11.formatDataStreamPart,
- generateId: () => import_provider_utils16.generateId,
+ fillMessageParts: () => fillMessageParts,
+ formatDataStreamPart: () => formatDataStreamPart,
+ generateId: () => import_provider_utils21.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
- jsonSchema: () => import_ui_utils11.jsonSchema,
- parseDataStreamPart: () => import_ui_utils11.parseDataStreamPart,
+ getMessageParts: () => getMessageParts,
+ getTextFromDataUrl: () => getTextFromDataUrl,
+ isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
+ isDeepEqualData: () => isDeepEqualData,
+ jsonSchema: () => jsonSchema,
+ parseDataStreamPart: () => parseDataStreamPart,
+ parsePartialJson: () => parsePartialJson,
  pipeDataStreamToResponse: () => pipeDataStreamToResponse,
- processDataStream: () => import_ui_utils11.processDataStream,
- processTextStream: () => import_ui_utils11.processTextStream,
+ prepareAttachmentsForRequest: () => prepareAttachmentsForRequest,
+ processDataStream: () => processDataStream,
+ processTextStream: () => processTextStream,
+ shouldResubmitMessages: () => shouldResubmitMessages,
  simulateReadableStream: () => simulateReadableStream,
  simulateStreamingMiddleware: () => simulateStreamingMiddleware,
  smoothStream: () => smoothStream,
  streamObject: () => streamObject,
  streamText: () => streamText,
  tool: () => tool,
+ updateToolCallResult: () => updateToolCallResult,
  wrapLanguageModel: () => wrapLanguageModel,
- zodSchema: () => import_ui_utils11.zodSchema
+ zodSchema: () => zodSchema
  });
- module.exports = __toCommonJS(streams_exports);
+ module.exports = __toCommonJS(ai_exports);

  // core/index.ts
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
- var import_ui_utils11 = require("@ai-sdk/ui-utils");
+ var import_provider_utils21 = require("@ai-sdk/provider-utils");
+
+ // core/util/index.ts
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
+
+ // core/util/process-chat-response.ts
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
+
+ // core/types/duplicated/usage.ts
+ function calculateLanguageModelUsage({
+ promptTokens,
+ completionTokens
+ }) {
+ return {
+ promptTokens,
+ completionTokens,
+ totalTokens: promptTokens + completionTokens
+ };
+ }
+
+ // core/util/parse-partial-json.ts
+ var import_provider_utils = require("@ai-sdk/provider-utils");
+
+ // core/util/fix-json.ts
+ function fixJson(input) {
+ const stack = ["ROOT"];
+ let lastValidIndex = -1;
+ let literalStart = null;
+ function processValueStart(char, i, swapState) {
+ {
+ switch (char) {
+ case '"': {
+ lastValidIndex = i;
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_STRING");
+ break;
+ }
+ case "f":
+ case "t":
+ case "n": {
+ lastValidIndex = i;
+ literalStart = i;
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_LITERAL");
+ break;
+ }
+ case "-": {
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_NUMBER");
+ break;
+ }
+ case "0":
+ case "1":
+ case "2":
+ case "3":
+ case "4":
+ case "5":
+ case "6":
+ case "7":
+ case "8":
+ case "9": {
+ lastValidIndex = i;
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_NUMBER");
+ break;
+ }
+ case "{": {
+ lastValidIndex = i;
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_OBJECT_START");
+ break;
+ }
+ case "[": {
+ lastValidIndex = i;
+ stack.pop();
+ stack.push(swapState);
+ stack.push("INSIDE_ARRAY_START");
+ break;
+ }
+ }
+ }
+ }
+ function processAfterObjectValue(char, i) {
+ switch (char) {
+ case ",": {
+ stack.pop();
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
+ break;
+ }
+ case "}": {
+ lastValidIndex = i;
+ stack.pop();
+ break;
+ }
+ }
+ }
+ function processAfterArrayValue(char, i) {
+ switch (char) {
+ case ",": {
+ stack.pop();
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
+ break;
+ }
+ case "]": {
+ lastValidIndex = i;
+ stack.pop();
+ break;
+ }
+ }
+ }
+ for (let i = 0; i < input.length; i++) {
+ const char = input[i];
+ const currentState = stack[stack.length - 1];
+ switch (currentState) {
+ case "ROOT":
+ processValueStart(char, i, "FINISH");
+ break;
+ case "INSIDE_OBJECT_START": {
+ switch (char) {
+ case '"': {
+ stack.pop();
+ stack.push("INSIDE_OBJECT_KEY");
+ break;
+ }
+ case "}": {
+ lastValidIndex = i;
+ stack.pop();
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_OBJECT_AFTER_COMMA": {
+ switch (char) {
+ case '"': {
+ stack.pop();
+ stack.push("INSIDE_OBJECT_KEY");
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_OBJECT_KEY": {
+ switch (char) {
+ case '"': {
+ stack.pop();
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_OBJECT_AFTER_KEY": {
+ switch (char) {
+ case ":": {
+ stack.pop();
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
+ break;
+ }
+ case "INSIDE_OBJECT_AFTER_VALUE": {
+ processAfterObjectValue(char, i);
+ break;
+ }
+ case "INSIDE_STRING": {
+ switch (char) {
+ case '"': {
+ stack.pop();
+ lastValidIndex = i;
+ break;
+ }
+ case "\\": {
+ stack.push("INSIDE_STRING_ESCAPE");
+ break;
+ }
+ default: {
+ lastValidIndex = i;
+ }
+ }
+ break;
+ }
+ case "INSIDE_ARRAY_START": {
+ switch (char) {
+ case "]": {
+ lastValidIndex = i;
+ stack.pop();
+ break;
+ }
+ default: {
+ lastValidIndex = i;
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_ARRAY_AFTER_VALUE": {
+ switch (char) {
+ case ",": {
+ stack.pop();
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
+ break;
+ }
+ case "]": {
+ lastValidIndex = i;
+ stack.pop();
+ break;
+ }
+ default: {
+ lastValidIndex = i;
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_ARRAY_AFTER_COMMA": {
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+ break;
+ }
+ case "INSIDE_STRING_ESCAPE": {
+ stack.pop();
+ lastValidIndex = i;
+ break;
+ }
+ case "INSIDE_NUMBER": {
+ switch (char) {
+ case "0":
+ case "1":
+ case "2":
+ case "3":
+ case "4":
+ case "5":
+ case "6":
+ case "7":
+ case "8":
+ case "9": {
+ lastValidIndex = i;
+ break;
+ }
+ case "e":
+ case "E":
+ case "-":
+ case ".": {
+ break;
+ }
+ case ",": {
+ stack.pop();
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+ processAfterArrayValue(char, i);
+ }
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+ processAfterObjectValue(char, i);
+ }
+ break;
+ }
+ case "}": {
+ stack.pop();
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+ processAfterObjectValue(char, i);
+ }
+ break;
+ }
+ case "]": {
+ stack.pop();
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+ processAfterArrayValue(char, i);
+ }
+ break;
+ }
+ default: {
+ stack.pop();
+ break;
+ }
+ }
+ break;
+ }
+ case "INSIDE_LITERAL": {
+ const partialLiteral = input.substring(literalStart, i + 1);
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
+ stack.pop();
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+ processAfterObjectValue(char, i);
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+ processAfterArrayValue(char, i);
+ }
+ } else {
+ lastValidIndex = i;
+ }
+ break;
+ }
+ }
+ }
+ let result = input.slice(0, lastValidIndex + 1);
+ for (let i = stack.length - 1; i >= 0; i--) {
+ const state = stack[i];
+ switch (state) {
+ case "INSIDE_STRING": {
+ result += '"';
+ break;
+ }
+ case "INSIDE_OBJECT_KEY":
+ case "INSIDE_OBJECT_AFTER_KEY":
+ case "INSIDE_OBJECT_AFTER_COMMA":
+ case "INSIDE_OBJECT_START":
+ case "INSIDE_OBJECT_BEFORE_VALUE":
+ case "INSIDE_OBJECT_AFTER_VALUE": {
+ result += "}";
+ break;
+ }
+ case "INSIDE_ARRAY_START":
+ case "INSIDE_ARRAY_AFTER_COMMA":
+ case "INSIDE_ARRAY_AFTER_VALUE": {
+ result += "]";
+ break;
+ }
+ case "INSIDE_LITERAL": {
+ const partialLiteral = input.substring(literalStart, input.length);
+ if ("true".startsWith(partialLiteral)) {
+ result += "true".slice(partialLiteral.length);
+ } else if ("false".startsWith(partialLiteral)) {
+ result += "false".slice(partialLiteral.length);
+ } else if ("null".startsWith(partialLiteral)) {
+ result += "null".slice(partialLiteral.length);
+ }
+ }
+ }
+ }
+ return result;
+ }
+
+ // core/util/parse-partial-json.ts
+ function parsePartialJson(jsonText) {
+ if (jsonText === void 0) {
+ return { value: void 0, state: "undefined-input" };
+ }
+ let result = (0, import_provider_utils.safeParseJSON)({ text: jsonText });
+ if (result.success) {
+ return { value: result.value, state: "successful-parse" };
+ }
+ result = (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
+ if (result.success) {
+ return { value: result.value, state: "repaired-parse" };
+ }
+ return { value: void 0, state: "failed-parse" };
+ }
+
+ // core/util/data-stream-parts.ts
+ var textStreamPart = {
+ code: "0",
+ name: "text",
+ parse: (value) => {
+ if (typeof value !== "string") {
+ throw new Error('"text" parts expect a string value.');
+ }
+ return { type: "text", value };
+ }
+ };
+ var dataStreamPart = {
+ code: "2",
+ name: "data",
+ parse: (value) => {
+ if (!Array.isArray(value)) {
+ throw new Error('"data" parts expect an array value.');
+ }
+ return { type: "data", value };
+ }
+ };
+ var errorStreamPart = {
+ code: "3",
+ name: "error",
+ parse: (value) => {
+ if (typeof value !== "string") {
+ throw new Error('"error" parts expect a string value.');
+ }
+ return { type: "error", value };
+ }
+ };
+ var messageAnnotationsStreamPart = {
+ code: "8",
+ name: "message_annotations",
+ parse: (value) => {
+ if (!Array.isArray(value)) {
+ throw new Error('"message_annotations" parts expect an array value.');
+ }
+ return { type: "message_annotations", value };
+ }
+ };
+ var toolCallStreamPart = {
+ code: "9",
+ name: "tool_call",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
+ throw new Error(
+ '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
+ );
+ }
+ return {
+ type: "tool_call",
+ value
+ };
+ }
+ };
+ var toolResultStreamPart = {
+ code: "a",
+ name: "tool_result",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("result" in value)) {
+ throw new Error(
+ '"tool_result" parts expect an object with a "toolCallId" and a "result" property.'
+ );
+ }
+ return {
+ type: "tool_result",
+ value
+ };
+ }
+ };
+ var toolCallStreamingStartStreamPart = {
+ code: "b",
+ name: "tool_call_streaming_start",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string") {
+ throw new Error(
+ '"tool_call_streaming_start" parts expect an object with a "toolCallId" and "toolName" property.'
+ );
+ }
+ return {
+ type: "tool_call_streaming_start",
+ value
+ };
+ }
+ };
+ var toolCallDeltaStreamPart = {
+ code: "c",
+ name: "tool_call_delta",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("argsTextDelta" in value) || typeof value.argsTextDelta !== "string") {
+ throw new Error(
+ '"tool_call_delta" parts expect an object with a "toolCallId" and "argsTextDelta" property.'
+ );
+ }
+ return {
+ type: "tool_call_delta",
+ value
+ };
+ }
+ };
+ var finishMessageStreamPart = {
+ code: "d",
+ name: "finish_message",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
+ throw new Error(
+ '"finish_message" parts expect an object with a "finishReason" property.'
+ );
+ }
+ const result = {
+ finishReason: value.finishReason
+ };
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
+ result.usage = {
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
+ };
+ }
+ return {
+ type: "finish_message",
+ value: result
+ };
+ }
+ };
+ var finishStepStreamPart = {
+ code: "e",
+ name: "finish_step",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
+ throw new Error(
+ '"finish_step" parts expect an object with a "finishReason" property.'
+ );
+ }
+ const result = {
+ finishReason: value.finishReason,
+ isContinued: false
+ };
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
+ result.usage = {
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
+ };
+ }
+ if ("isContinued" in value && typeof value.isContinued === "boolean") {
+ result.isContinued = value.isContinued;
+ }
+ return {
+ type: "finish_step",
+ value: result
+ };
+ }
+ };
+ var startStepStreamPart = {
+ code: "f",
+ name: "start_step",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("messageId" in value) || typeof value.messageId !== "string") {
+ throw new Error(
+ '"start_step" parts expect an object with an "id" property.'
+ );
+ }
+ return {
+ type: "start_step",
+ value: {
+ messageId: value.messageId
+ }
+ };
+ }
+ };
+ var reasoningStreamPart = {
+ code: "g",
+ name: "reasoning",
+ parse: (value) => {
+ if (typeof value !== "string") {
+ throw new Error('"reasoning" parts expect a string value.');
+ }
+ return { type: "reasoning", value };
+ }
+ };
+ var sourcePart = {
+ code: "h",
+ name: "source",
+ parse: (value) => {
+ if (value == null || typeof value !== "object") {
+ throw new Error('"source" parts expect a Source object.');
+ }
+ return {
+ type: "source",
+ value
+ };
+ }
+ };
+ var redactedReasoningStreamPart = {
+ code: "i",
+ name: "redacted_reasoning",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string") {
+ throw new Error(
+ '"redacted_reasoning" parts expect an object with a "data" property.'
+ );
+ }
+ return { type: "redacted_reasoning", value: { data: value.data } };
+ }
+ };
+ var reasoningSignatureStreamPart = {
+ code: "j",
+ name: "reasoning_signature",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("signature" in value) || typeof value.signature !== "string") {
+ throw new Error(
+ '"reasoning_signature" parts expect an object with a "signature" property.'
+ );
+ }
+ return {
+ type: "reasoning_signature",
+ value: { signature: value.signature }
+ };
+ }
+ };
+ var fileStreamPart = {
+ code: "k",
+ name: "file",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
+ throw new Error(
+ '"file" parts expect an object with a "data" and "mimeType" property.'
+ );
+ }
+ return { type: "file", value };
+ }
+ };
+ var dataStreamParts = [
+ textStreamPart,
+ dataStreamPart,
+ errorStreamPart,
+ messageAnnotationsStreamPart,
+ toolCallStreamPart,
+ toolResultStreamPart,
+ toolCallStreamingStartStreamPart,
+ toolCallDeltaStreamPart,
+ finishMessageStreamPart,
+ finishStepStreamPart,
+ startStepStreamPart,
+ reasoningStreamPart,
+ sourcePart,
+ redactedReasoningStreamPart,
+ reasoningSignatureStreamPart,
+ fileStreamPart
+ ];
+ var dataStreamPartsByCode = Object.fromEntries(
+ dataStreamParts.map((part) => [part.code, part])
+ );
+ var DataStreamStringPrefixes = Object.fromEntries(
+ dataStreamParts.map((part) => [part.name, part.code])
+ );
+ var validCodes = dataStreamParts.map((part) => part.code);
+ var parseDataStreamPart = (line) => {
+ const firstSeparatorIndex = line.indexOf(":");
+ if (firstSeparatorIndex === -1) {
+ throw new Error("Failed to parse stream string. No separator found.");
+ }
+ const prefix = line.slice(0, firstSeparatorIndex);
+ if (!validCodes.includes(prefix)) {
+ throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
+ }
+ const code = prefix;
+ const textValue = line.slice(firstSeparatorIndex + 1);
+ const jsonValue = JSON.parse(textValue);
+ return dataStreamPartsByCode[code].parse(jsonValue);
+ };
+ function formatDataStreamPart(type, value) {
+ const streamPart = dataStreamParts.find((part) => part.name === type);
+ if (!streamPart) {
+ throw new Error(`Invalid stream part type: ${type}`);
+ }
+ return `${streamPart.code}:${JSON.stringify(value)}
+ `;
+ }
+
+ // core/util/process-data-stream.ts
+ var NEWLINE = "\n".charCodeAt(0);
+ function concatChunks(chunks, totalLength) {
+ const concatenatedChunks = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const chunk of chunks) {
+ concatenatedChunks.set(chunk, offset);
+ offset += chunk.length;
+ }
+ chunks.length = 0;
+ return concatenatedChunks;
+ }
+ async function processDataStream({
+ stream,
+ onTextPart,
+ onReasoningPart,
+ onReasoningSignaturePart,
+ onRedactedReasoningPart,
+ onSourcePart,
+ onFilePart,
+ onDataPart,
+ onErrorPart,
+ onToolCallStreamingStartPart,
+ onToolCallDeltaPart,
+ onToolCallPart,
+ onToolResultPart,
+ onMessageAnnotationsPart,
+ onFinishMessagePart,
+ onFinishStepPart,
+ onStartStepPart
+ }) {
+ const reader = stream.getReader();
+ const decoder = new TextDecoder();
+ const chunks = [];
+ let totalLength = 0;
+ while (true) {
+ const { value } = await reader.read();
+ if (value) {
+ chunks.push(value);
+ totalLength += value.length;
+ if (value[value.length - 1] !== NEWLINE) {
+ continue;
+ }
+ }
+ if (chunks.length === 0) {
+ break;
+ }
+ const concatenatedChunks = concatChunks(chunks, totalLength);
+ totalLength = 0;
+ const streamParts = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseDataStreamPart);
+ for (const { type, value: value2 } of streamParts) {
+ switch (type) {
+ case "text":
+ await (onTextPart == null ? void 0 : onTextPart(value2));
+ break;
+ case "reasoning":
+ await (onReasoningPart == null ? void 0 : onReasoningPart(value2));
+ break;
+ case "reasoning_signature":
+ await (onReasoningSignaturePart == null ? void 0 : onReasoningSignaturePart(value2));
+ break;
+ case "redacted_reasoning":
+ await (onRedactedReasoningPart == null ? void 0 : onRedactedReasoningPart(value2));
+ break;
+ case "file":
+ await (onFilePart == null ? void 0 : onFilePart(value2));
+ break;
+ case "source":
+ await (onSourcePart == null ? void 0 : onSourcePart(value2));
+ break;
+ case "data":
+ await (onDataPart == null ? void 0 : onDataPart(value2));
+ break;
+ case "error":
+ await (onErrorPart == null ? void 0 : onErrorPart(value2));
+ break;
+ case "message_annotations":
+ await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value2));
+ break;
+ case "tool_call_streaming_start":
+ await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value2));
+ break;
+ case "tool_call_delta":
+ await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value2));
+ break;
+ case "tool_call":
+ await (onToolCallPart == null ? void 0 : onToolCallPart(value2));
+ break;
+ case "tool_result":
+ await (onToolResultPart == null ? void 0 : onToolResultPart(value2));
+ break;
+ case "finish_message":
+ await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value2));
+ break;
+ case "finish_step":
+ await (onFinishStepPart == null ? void 0 : onFinishStepPart(value2));
+ break;
+ case "start_step":
+ await (onStartStepPart == null ? void 0 : onStartStepPart(value2));
+ break;
+ default: {
+ const exhaustiveCheck = type;
+ throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
+ }
+ }
+ }
+ }
+ }
+
+ // core/util/process-chat-response.ts
+ async function processChatResponse({
+ stream,
+ update,
+ onToolCall,
+ onFinish,
+ generateId: generateId3 = import_provider_utils2.generateId,
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
+ lastMessage
+ }) {
+ var _a17, _b;
+ const replaceLastMessage = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
+ let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
+ ((_b = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.reduce((max, toolInvocation) => {
+ var _a18;
+ return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
+ }, 0)) != null ? _b : 0) : 0;
+ const message = replaceLastMessage ? structuredClone(lastMessage) : {
+ id: generateId3(),
+ createdAt: getCurrentDate(),
+ role: "assistant",
+ content: "",
+ parts: []
+ };
+ let currentTextPart = void 0;
+ let currentReasoningPart = void 0;
+ let currentReasoningTextDetail = void 0;
+ function updateToolInvocationPart(toolCallId, invocation) {
+ const part = message.parts.find(
+ (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+ );
+ if (part != null) {
+ part.toolInvocation = invocation;
+ } else {
+ message.parts.push({
+ type: "tool-invocation",
+ toolInvocation: invocation
+ });
+ }
+ }
+ const data = [];
+ let messageAnnotations = replaceLastMessage ? lastMessage == null ? void 0 : lastMessage.annotations : void 0;
+ const partialToolCalls = {};
+ let usage = {
+ completionTokens: NaN,
+ promptTokens: NaN,
+ totalTokens: NaN
+ };
+ let finishReason = "unknown";
+ function execUpdate() {
+ const copiedData = [...data];
+ if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
+ message.annotations = messageAnnotations;
+ }
+ const copiedMessage = {
+ // deep copy the message to ensure that deep changes (msg attachments) are updated
+ // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
+ ...structuredClone(message),
+ // add a revision id to ensure that the message is updated with SWR. SWR uses a
+ // hashing approach by default to detect changes, but it only works for shallow
+ // changes. This is why we need to add a revision id to ensure that the message
+ // is updated with SWR (without it, the changes get stuck in SWR and are not
+ // forwarded to rendering):
+ revisionId: generateId3()
+ };
+ update({
+ message: copiedMessage,
+ data: copiedData,
+ replaceLastMessage
+ });
+ }
+ await processDataStream({
+ stream,
+ onTextPart(value) {
+ if (currentTextPart == null) {
+ currentTextPart = {
+ type: "text",
+ text: value
+ };
+ message.parts.push(currentTextPart);
+ } else {
+ currentTextPart.text += value;
+ }
+ message.content += value;
+ execUpdate();
+ },
+ onReasoningPart(value) {
+ var _a18;
+ if (currentReasoningTextDetail == null) {
+ currentReasoningTextDetail = { type: "text", text: value };
+ if (currentReasoningPart != null) {
+ currentReasoningPart.details.push(currentReasoningTextDetail);
+ }
+ } else {
+ currentReasoningTextDetail.text += value;
+ }
+ if (currentReasoningPart == null) {
+ currentReasoningPart = {
+ type: "reasoning",
+ reasoning: value,
+ details: [currentReasoningTextDetail]
+ };
+ message.parts.push(currentReasoningPart);
+ } else {
+ currentReasoningPart.reasoning += value;
+ }
+ message.reasoning = ((_a18 = message.reasoning) != null ? _a18 : "") + value;
+ execUpdate();
+ },
+ onReasoningSignaturePart(value) {
+ if (currentReasoningTextDetail != null) {
+ currentReasoningTextDetail.signature = value.signature;
+ }
+ },
+ onRedactedReasoningPart(value) {
+ if (currentReasoningPart == null) {
+ currentReasoningPart = {
+ type: "reasoning",
+ reasoning: "",
+ details: []
+ };
+ message.parts.push(currentReasoningPart);
+ }
+ currentReasoningPart.details.push({
+ type: "redacted",
+ data: value.data
+ });
+ currentReasoningTextDetail = void 0;
+ execUpdate();
+ },
+ onFilePart(value) {
+ message.parts.push({
+ type: "file",
+ mediaType: value.mimeType,
+ data: value.data
+ });
+ execUpdate();
+ },
+ onSourcePart(value) {
+ message.parts.push({
+ type: "source",
+ source: value
+ });
+ execUpdate();
+ },
+ onToolCallStreamingStartPart(value) {
+ if (message.toolInvocations == null) {
+ message.toolInvocations = [];
+ }
+ partialToolCalls[value.toolCallId] = {
+ text: "",
+ step,
+ toolName: value.toolName,
+ index: message.toolInvocations.length
+ };
+ const invocation = {
+ state: "partial-call",
+ step,
+ toolCallId: value.toolCallId,
+ toolName: value.toolName,
+ args: void 0
+ };
+ message.toolInvocations.push(invocation);
+ updateToolInvocationPart(value.toolCallId, invocation);
+ execUpdate();
+ },
+ onToolCallDeltaPart(value) {
+ const partialToolCall = partialToolCalls[value.toolCallId];
+ partialToolCall.text += value.argsTextDelta;
+ const { value: partialArgs } = parsePartialJson(partialToolCall.text);
+ const invocation = {
+ state: "partial-call",
+ step: partialToolCall.step,
+ toolCallId: value.toolCallId,
+ toolName: partialToolCall.toolName,
+ args: partialArgs
+ };
+ message.toolInvocations[partialToolCall.index] = invocation;
+ updateToolInvocationPart(value.toolCallId, invocation);
+ execUpdate();
+ },
+ async onToolCallPart(value) {
+ const invocation = {
+ state: "call",
+ step,
+ ...value
+ };
+ if (partialToolCalls[value.toolCallId] != null) {
+ message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
+ } else {
+ if (message.toolInvocations == null) {
+ message.toolInvocations = [];
+ }
+ message.toolInvocations.push(invocation);
+ }
+ updateToolInvocationPart(value.toolCallId, invocation);
+ execUpdate();
+ if (onToolCall) {
+ const result = await onToolCall({ toolCall: value });
+ if (result != null) {
+ const invocation2 = {
+ state: "result",
+ step,
+ ...value,
+ result
+ };
+ message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
+ updateToolInvocationPart(value.toolCallId, invocation2);
+ execUpdate();
+ }
+ }
+ },
+ onToolResultPart(value) {
+ const toolInvocations = message.toolInvocations;
+ if (toolInvocations == null) {
+ throw new Error("tool_result must be preceded by a tool_call");
+ }
+ const toolInvocationIndex = toolInvocations.findIndex(
+ (invocation2) => invocation2.toolCallId === value.toolCallId
+ );
+ if (toolInvocationIndex === -1) {
+ throw new Error(
+ "tool_result must be preceded by a tool_call with the same toolCallId"
+ );
+ }
+ const invocation = {
+ ...toolInvocations[toolInvocationIndex],
+ state: "result",
+ ...value
+ };
+ toolInvocations[toolInvocationIndex] = invocation;
+ updateToolInvocationPart(value.toolCallId, invocation);
+ execUpdate();
+ },
+ onDataPart(value) {
+ data.push(...value);
+ execUpdate();
+ },
+ onMessageAnnotationsPart(value) {
+ if (messageAnnotations == null) {
+ messageAnnotations = [...value];
+ } else {
+ messageAnnotations.push(...value);
+ }
+ execUpdate();
+ },
+ onFinishStepPart(value) {
+ step += 1;
+ currentTextPart = value.isContinued ? currentTextPart : void 0;
+ currentReasoningPart = void 0;
+ currentReasoningTextDetail = void 0;
+ },
+ onStartStepPart(value) {
+ if (!replaceLastMessage) {
+ message.id = value.messageId;
+ }
+ message.parts.push({ type: "step-start" });
+ execUpdate();
+ },
+ onFinishMessagePart(value) {
+ finishReason = value.finishReason;
+ if (value.usage != null) {
+ usage = calculateLanguageModelUsage(value.usage);
+ }
+ },
+ onErrorPart(error) {
+ throw new Error(error);
+ }
+ });
+ onFinish == null ? void 0 : onFinish({ message, finishReason, usage });
+ }
+
+ // core/util/process-chat-text-response.ts
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
+
+ // core/util/process-text-stream.ts
+ async function processTextStream({
+ stream,
+ onTextPart
+ }) {
+ const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ await onTextPart(value);
+ }
+ }
+
+ // core/util/process-chat-text-response.ts
+ async function processChatTextResponse({
+ stream,
+ update,
+ onFinish,
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
+ generateId: generateId3 = import_provider_utils3.generateId
+ }) {
+ const textPart = { type: "text", text: "" };
+ const resultMessage = {
+ id: generateId3(),
+ createdAt: getCurrentDate(),
+ role: "assistant",
+ content: "",
+ parts: [textPart]
+ };
+ await processTextStream({
+ stream,
+ onTextPart: (chunk) => {
+ resultMessage.content += chunk;
+ textPart.text += chunk;
+ update({
+ message: { ...resultMessage },
+ data: [],
+ replaceLastMessage: false
+ });
+ }
+ });
+ onFinish == null ? void 0 : onFinish(resultMessage, {
+ usage: { completionTokens: NaN, promptTokens: NaN, totalTokens: NaN },
+ finishReason: "unknown"
+ });
+ }
+
+ // core/util/call-chat-api.ts
+ var getOriginalFetch = () => fetch;
+ async function callChatApi({
+ api,
+ body,
+ streamProtocol = "data",
+ credentials,
+ headers,
+ abortController,
+ restoreMessagesOnFailure,
+ onResponse,
+ onUpdate,
+ onFinish,
+ onToolCall,
+ generateId: generateId3,
+ fetch: fetch2 = getOriginalFetch(),
+ lastMessage
+ }) {
+ var _a17, _b;
+ const response = await fetch2(api, {
+ method: "POST",
+ body: JSON.stringify(body),
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ credentials
+ }).catch((err) => {
+ restoreMessagesOnFailure();
+ throw err;
+ });
+ if (onResponse) {
+ try {
+ await onResponse(response);
+ } catch (err) {
+ throw err;
+ }
+ }
+ if (!response.ok) {
+ restoreMessagesOnFailure();
+ throw new Error(
+ (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ switch (streamProtocol) {
+ case "text": {
+ await processChatTextResponse({
+ stream: response.body,
+ update: onUpdate,
+ onFinish,
+ generateId: generateId3
+ });
+ return;
+ }
+ case "data": {
+ await processChatResponse({
+ stream: response.body,
+ update: onUpdate,
+ lastMessage,
+ onToolCall,
+ onFinish({ message, finishReason, usage }) {
+ if (onFinish && message != null) {
+ onFinish(message, { usage, finishReason });
+ }
+ },
+ generateId: generateId3
+ });
+ return;
+ }
+ default: {
+ const exhaustiveCheck = streamProtocol;
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+ }
+ }
+ }
+
+ // core/util/call-completion-api.ts
+ var getOriginalFetch2 = () => fetch;
+ async function callCompletionApi({
+ api,
+ prompt,
+ credentials,
+ headers,
+ body,
+ streamProtocol = "data",
+ setCompletion,
+ setLoading,
+ setError,
+ setAbortController,
+ onResponse,
+ onFinish,
+ onError,
+ onData,
+ fetch: fetch2 = getOriginalFetch2()
+ }) {
+ var _a17;
+ try {
+ setLoading(true);
+ setError(void 0);
+ const abortController = new AbortController();
+ setAbortController(abortController);
+ setCompletion("");
+ const response = await fetch2(api, {
+ method: "POST",
+ body: JSON.stringify({
+ prompt,
+ ...body
+ }),
+ credentials,
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: abortController.signal
+ }).catch((err) => {
+ throw err;
+ });
+ if (onResponse) {
+ try {
+ await onResponse(response);
+ } catch (err) {
+ throw err;
+ }
+ }
+ if (!response.ok) {
+ throw new Error(
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ let result = "";
+ switch (streamProtocol) {
+ case "text": {
+ await processTextStream({
+ stream: response.body,
+ onTextPart: (chunk) => {
+ result += chunk;
+ setCompletion(result);
+ }
+ });
+ break;
+ }
+ case "data": {
+ await processDataStream({
+ stream: response.body,
+ onTextPart(value) {
+ result += value;
+ setCompletion(result);
+ },
+ onDataPart(value) {
+ onData == null ? void 0 : onData(value);
+ },
+ onErrorPart(value) {
+ throw new Error(value);
+ }
+ });
+ break;
+ }
+ default: {
+ const exhaustiveCheck = streamProtocol;
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+ }
+ }
+ if (onFinish) {
+ onFinish(prompt, result);
+ }
+ setAbortController(null);
+ return result;
+ } catch (err) {
+ if (err.name === "AbortError") {
+ setAbortController(null);
+ return null;
+ }
+ if (err instanceof Error) {
+ if (onError) {
+ onError(err);
+ }
+ }
+ setError(err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ // core/util/data-url.ts
+ function getTextFromDataUrl(dataUrl) {
+ const [header, base64Content] = dataUrl.split(",");
+ const mediaType = header.split(";")[0].split(":")[1];
+ if (mediaType == null || base64Content == null) {
+ throw new Error("Invalid data URL format");
+ }
+ try {
+ return window.atob(base64Content);
+ } catch (error) {
+ throw new Error(`Error decoding data URL`);
+ }
+ }
+
+ // core/util/extract-max-tool-invocation-step.ts
+ function extractMaxToolInvocationStep(toolInvocations) {
+ return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
+ var _a17;
+ return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
+ }, 0);
+ }
+
+ // core/util/get-message-parts.ts
+ function getMessageParts(message) {
+ var _a17;
+ return (_a17 = message.parts) != null ? _a17 : [
+ ...message.toolInvocations ? message.toolInvocations.map((toolInvocation) => ({
+ type: "tool-invocation",
+ toolInvocation
+ })) : [],
+ ...message.reasoning ? [
+ {
+ type: "reasoning",
+ reasoning: message.reasoning,
+ details: [{ type: "text", text: message.reasoning }]
+ }
+ ] : [],
+ ...message.content ? [{ type: "text", text: message.content }] : []
+ ];
+ }
+
+ // core/util/fill-message-parts.ts
+ function fillMessageParts(messages) {
+ return messages.map((message) => ({
+ ...message,
+ parts: getMessageParts(message)
+ }));
+ }
+
+ // core/util/is-deep-equal-data.ts
+ function isDeepEqualData(obj1, obj2) {
+ if (obj1 === obj2)
+ return true;
+ if (obj1 == null || obj2 == null)
+ return false;
+ if (typeof obj1 !== "object" && typeof obj2 !== "object")
+ return obj1 === obj2;
+ if (obj1.constructor !== obj2.constructor)
+ return false;
+ if (obj1 instanceof Date && obj2 instanceof Date) {
+ return obj1.getTime() === obj2.getTime();
+ }
+ if (Array.isArray(obj1)) {
+ if (obj1.length !== obj2.length)
+ return false;
+ for (let i = 0; i < obj1.length; i++) {
+ if (!isDeepEqualData(obj1[i], obj2[i]))
+ return false;
+ }
+ return true;
+ }
+ const keys1 = Object.keys(obj1);
+ const keys2 = Object.keys(obj2);
+ if (keys1.length !== keys2.length)
+ return false;
+ for (const key of keys1) {
+ if (!keys2.includes(key))
+ return false;
+ if (!isDeepEqualData(obj1[key], obj2[key]))
+ return false;
+ }
+ return true;
+ }
+
+ // core/util/prepare-attachments-for-request.ts
+ async function prepareAttachmentsForRequest(attachmentsFromOptions) {
+ if (!attachmentsFromOptions) {
+ return [];
+ }
+ if (attachmentsFromOptions instanceof FileList) {
+ return Promise.all(
+ Array.from(attachmentsFromOptions).map(async (attachment) => {
+ const { name: name17, type } = attachment;
+ const dataUrl = await new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onload = (readerEvent) => {
+ var _a17;
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+ };
+ reader.onerror = (error) => reject(error);
+ reader.readAsDataURL(attachment);
+ });
+ return {
+ name: name17,
+ contentType: type,
+ url: dataUrl
+ };
+ })
+ );
+ }
+ if (Array.isArray(attachmentsFromOptions)) {
+ return attachmentsFromOptions;
+ }
+ throw new Error("Invalid attachments type");
+ }
+
+ // core/util/schema.ts
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
+
+ // core/util/zod-schema.ts
+ var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
+ function zodSchema(zodSchema2, options) {
+ var _a17;
+ const useReferences = (_a17 = options == null ? void 0 : options.useReferences) != null ? _a17 : false;
+ return jsonSchema(
+ (0, import_zod_to_json_schema.default)(zodSchema2, {
+ $refStrategy: useReferences ? "root" : "none",
+ target: "jsonSchema7"
+ // note: openai mode breaks various gemini conversions
+ }),
+ {
+ validate: (value) => {
+ const result = zodSchema2.safeParse(value);
+ return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
+ }
+ }
+ );
+ }
+
+ // core/util/schema.ts
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
+ function jsonSchema(jsonSchema2, {
+ validate
+ } = {}) {
+ return {
+ [schemaSymbol]: true,
+ _type: void 0,
+ // should never be used directly
+ [import_provider_utils4.validatorSymbol]: true,
+ jsonSchema: jsonSchema2,
+ validate
+ };
+ }
+ function isSchema(value) {
+ return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
+ }
+ function asSchema(schema) {
+ return isSchema(schema) ? schema : zodSchema(schema);
+ }
+
+ // core/util/should-resubmit-messages.ts
+ function shouldResubmitMessages({
+ originalMaxToolInvocationStep,
+ originalMessageCount,
+ maxSteps,
+ messages
+ }) {
+ var _a17;
+ const lastMessage = messages[messages.length - 1];
+ return (
+ // check if the feature is enabled:
+ maxSteps > 1 && // ensure there is a last message:
+ lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
+ (messages.length > originalMessageCount || extractMaxToolInvocationStep(lastMessage.toolInvocations) !== originalMaxToolInvocationStep) && // check that next step is possible:
+ isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
+ ((_a17 = extractMaxToolInvocationStep(lastMessage.toolInvocations)) != null ? _a17 : 0) < maxSteps
+ );
+ }
+ function isAssistantMessageWithCompletedToolCalls(message) {
+ if (message.role !== "assistant") {
+ return false;
+ }
+ const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
+ return part.type === "step-start" ? index : lastIndex;
+ }, -1);
+ const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
+ return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
+ }
+
+ // core/util/update-tool-call-result.ts
+ function updateToolCallResult({
+ messages,
+ toolCallId,
+ toolResult: result
+ }) {
+ var _a17;
+ const lastMessage = messages[messages.length - 1];
+ const invocationPart = lastMessage.parts.find(
+ (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+ );
+ if (invocationPart == null) {
+ return;
+ }
+ const toolResult = {
+ ...invocationPart.toolInvocation,
+ state: "result",
+ result
+ };
+ invocationPart.toolInvocation = toolResult;
+ lastMessage.toolInvocations = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.map(
+ (toolInvocation) => toolInvocation.toolCallId === toolCallId ? toolResult : toolInvocation
+ );
+ }

  // core/data-stream/create-data-stream.ts
- var import_ui_utils = require("@ai-sdk/ui-utils");
  function createDataStream({
  execute,
  onError = () => "An error occurred."
@@ -125,13 +1618,13 @@ function createDataStream({
  safeEnqueue(data);
  },
  writeData(data) {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("data", [data]));
+ safeEnqueue(formatDataStreamPart("data", [data]));
  },
  writeMessageAnnotation(annotation) {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("message_annotations", [annotation]));
+ safeEnqueue(formatDataStreamPart("message_annotations", [annotation]));
  },
  writeSource(source) {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("source", source));
+ safeEnqueue(formatDataStreamPart("source", source));
  },
  merge(streamArg) {
  ongoingStreamPromises.push(
@@ -144,7 +1637,7 @@ function createDataStream({
  safeEnqueue(value);
  }
  })().catch((error) => {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
  })
  );
  },
@@ -153,12 +1646,12 @@
  if (result) {
  ongoingStreamPromises.push(
  result.catch((error) => {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
  })
  );
  }
  } catch (error) {
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
  }
  const waitForStreams = new Promise(async (resolve) => {
  while (ongoingStreamPromises.length > 0) {
@@ -308,7 +1801,7 @@ _a = symbol;

  // util/retry-with-exponential-backoff.ts
  var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils = require("@ai-sdk/provider-utils");
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");

  // util/retry-error.ts
  var import_provider2 = require("@ai-sdk/provider");
@@ -352,13 +1845,13 @@ async function _retryWithExponentialBackoff(f, {
  try {
  return await f();
  } catch (error) {
- if ((0, import_provider_utils.isAbortError)(error)) {
+ if ((0, import_provider_utils6.isAbortError)(error)) {
  throw error;
  }
  if (maxRetries === 0) {
  throw error;
  }
- const errorMessage = (0, import_provider_utils.getErrorMessage)(error);
+ const errorMessage = (0, import_provider_utils6.getErrorMessage)(error);
  const newErrors = [...errors, error];
  const tryNumber = newErrors.length;
  if (tryNumber > maxRetries) {
@@ -369,7 +1862,7 @@ async function _retryWithExponentialBackoff(f, {
  });
  }
  if (error instanceof Error && import_provider3.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
- await (0, import_provider_utils.delay)(delayInMs);
+ await (0, import_provider_utils6.delay)(delayInMs);
  return _retryWithExponentialBackoff(
  f,
  { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
@@ -907,7 +2400,7 @@ var NoImageGeneratedError = class extends import_provider4.AISDKError {
  _a3 = symbol3;

  // core/generate-text/generated-file.ts
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
  var DefaultGeneratedFile = class {
  constructor({
  data,
@@ -921,14 +2414,14 @@ var DefaultGeneratedFile = class {
  // lazy conversion with caching to avoid unnecessary conversion overhead:
  get base64() {
  if (this.base64Data == null) {
- this.base64Data = (0, import_provider_utils2.convertUint8ArrayToBase64)(this.uint8ArrayData);
+ this.base64Data = (0, import_provider_utils7.convertUint8ArrayToBase64)(this.uint8ArrayData);
  }
  return this.base64Data;
  }
  // lazy conversion with caching to avoid unnecessary conversion overhead:
  get uint8Array() {
  if (this.uint8ArrayData == null) {
- this.uint8ArrayData = (0, import_provider_utils2.convertBase64ToUint8Array)(this.base64Data);
+ this.uint8ArrayData = (0, import_provider_utils7.convertBase64ToUint8Array)(this.base64Data);
  }
  return this.uint8ArrayData;
  }
@@ -1137,7 +2630,7 @@ var DefaultGenerateImageResult = class {

  // core/generate-object/generate-object.ts
  var import_provider12 = require("@ai-sdk/provider");
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");

  // errors/no-object-generated-error.ts
  var import_provider5 = require("@ai-sdk/provider");
@@ -1168,7 +2661,7 @@ var NoObjectGeneratedError = class extends import_provider5.AISDKError {
  _a4 = symbol4;

  // core/prompt/convert-to-language-model-prompt.ts
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");

  // util/download-error.ts
  var import_provider6 = require("@ai-sdk/provider");
@@ -1222,7 +2715,7 @@ async function download({ url }) {
  }

  // core/prompt/data-content.ts
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");

  // core/prompt/invalid-data-content-error.ts
  var import_provider7 = require("@ai-sdk/provider");
@@ -1266,9 +2759,9 @@ function convertDataContentToBase64String(content) {
  return content;
  }
  if (content instanceof ArrayBuffer) {
- return (0, import_provider_utils3.convertUint8ArrayToBase64)(new Uint8Array(content));
+ return (0, import_provider_utils8.convertUint8ArrayToBase64)(new Uint8Array(content));
  }
- return (0, import_provider_utils3.convertUint8ArrayToBase64)(content);
+ return (0, import_provider_utils8.convertUint8ArrayToBase64)(content);
  }
  function convertDataContentToUint8Array(content) {
  if (content instanceof Uint8Array) {
@@ -1276,7 +2769,7 @@ function convertDataContentToUint8Array(content) {
  }
  if (typeof content === "string") {
  try {
- return (0, import_provider_utils3.convertBase64ToUint8Array)(content);
+ return (0, import_provider_utils8.convertBase64ToUint8Array)(content);
  } catch (error) {
  throw new InvalidDataContentError({
  message: "Invalid data content. Content string is not a base64-encoded media.",
@@ -1553,7 +3046,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  mediaType: mediaType != null ? mediaType : "image/*",
  // any image
  filename: void 0,
- data: normalizedData instanceof Uint8Array ? (0, import_provider_utils4.convertUint8ArrayToBase64)(normalizedData) : normalizedData,
+ data: normalizedData instanceof Uint8Array ? (0, import_provider_utils9.convertUint8ArrayToBase64)(normalizedData) : normalizedData,
  providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
  };
  }
@@ -1668,7 +3161,7 @@ function prepareCallSettings({

  // core/prompt/standardize-prompt.ts
  var import_provider10 = require("@ai-sdk/provider");
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
  var import_zod7 = require("zod");

  // core/prompt/attachments-to-parts.ts
@@ -2247,7 +3740,7 @@ function standardizePrompt({
  message: "messages must not be empty"
  });
  }
- const validationResult = (0, import_provider_utils5.safeValidateTypes)({
+ const validationResult = (0, import_provider_utils10.safeValidateTypes)({
  value: messages,
  schema: import_zod7.z.array(coreMessageSchema)
  });
@@ -2268,7 +3761,7 @@ function standardizePrompt({
  }

  // core/types/usage.ts
- function calculateLanguageModelUsage({
+ function calculateLanguageModelUsage2({
  promptTokens,
  completionTokens
  }) {
@@ -2308,8 +3801,7 @@ function injectJsonInstruction({
2308
3801
 
2309
3802
  // core/generate-object/output-strategy.ts
2310
3803
  var import_provider11 = require("@ai-sdk/provider");
2311
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
2312
- var import_ui_utils2 = require("@ai-sdk/ui-utils");
3804
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
2313
3805
 
2314
3806
  // core/util/async-iterable-stream.ts
2315
3807
  function createAsyncIterableStream(source) {
@@ -2365,7 +3857,7 @@ var objectOutputStrategy = (schema) => ({
2365
3857
  };
2366
3858
  },
2367
3859
  validateFinalResult(value) {
2368
- return (0, import_provider_utils6.safeValidateTypes)({ value, schema });
3860
+ return (0, import_provider_utils11.safeValidateTypes)({ value, schema });
2369
3861
  },
2370
3862
  createElementStream() {
2371
3863
  throw new import_provider11.UnsupportedFunctionalityError({
@@ -2404,7 +3896,7 @@ var arrayOutputStrategy = (schema) => {
2404
3896
  const resultArray = [];
2405
3897
  for (let i = 0; i < inputArray.length; i++) {
2406
3898
  const element = inputArray[i];
2407
- const result = (0, import_provider_utils6.safeValidateTypes)({ value: element, schema });
3899
+ const result = (0, import_provider_utils11.safeValidateTypes)({ value: element, schema });
2408
3900
  if (i === inputArray.length - 1 && !isFinalDelta) {
2409
3901
  continue;
2410
3902
  }
@@ -2445,7 +3937,7 @@ var arrayOutputStrategy = (schema) => {
2445
3937
  }
2446
3938
  const inputArray = value.elements;
2447
3939
  for (const element of inputArray) {
2448
- const result = (0, import_provider_utils6.safeValidateTypes)({ value: element, schema });
3940
+ const result = (0, import_provider_utils11.safeValidateTypes)({ value: element, schema });
2449
3941
  if (!result.success) {
2450
3942
  return result;
2451
3943
  }
@@ -2537,9 +4029,9 @@ function getOutputStrategy({
2537
4029
  }) {
2538
4030
  switch (output) {
2539
4031
  case "object":
2540
- return objectOutputStrategy((0, import_ui_utils2.asSchema)(schema));
4032
+ return objectOutputStrategy(asSchema(schema));
2541
4033
  case "array":
2542
- return arrayOutputStrategy((0, import_ui_utils2.asSchema)(schema));
4034
+ return arrayOutputStrategy(asSchema(schema));
2543
4035
  case "enum":
2544
4036
  return enumOutputStrategy(enumValues);
2545
4037
  case "no-schema":
@@ -2678,7 +4170,7 @@ function validateObjectGenerationInput({
2678
4170
  }
2679
4171
 
2680
4172
  // core/generate-object/generate-object.ts
2681
- var originalGenerateId = (0, import_provider_utils7.createIdGenerator)({ prefix: "aiobj", size: 24 });
4173
+ var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({ prefix: "aiobj", size: 24 });
2682
4174
  async function generateObject({
2683
4175
  model,
2684
4176
  enum: enumValues,
@@ -2759,7 +4251,6 @@ async function generateObject({
2759
4251
  let finishReason;
2760
4252
  let usage;
2761
4253
  let warnings;
2762
- let rawResponse;
2763
4254
  let response;
2764
4255
  let request;
2765
4256
  let logprobs;
@@ -2814,7 +4305,7 @@ async function generateObject({
2814
4305
  }),
2815
4306
  tracer,
2816
4307
  fn: async (span2) => {
2817
- var _a18, _b2, _c2, _d2, _e, _f;
4308
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
2818
4309
  const result2 = await model.doGenerate({
2819
4310
  responseFormat: {
2820
4311
  type: "json",
@@ -2832,13 +4323,15 @@ async function generateObject({
2832
4323
  const responseData = {
2833
4324
  id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
2834
4325
  timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2835
- modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
4326
+ modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
4327
+ headers: (_g = result2.response) == null ? void 0 : _g.headers,
4328
+ body: (_h = result2.response) == null ? void 0 : _h.body
2836
4329
  };
2837
4330
  if (result2.text === void 0) {
2838
4331
  throw new NoObjectGeneratedError({
2839
4332
  message: "No object generated: the model did not return a response.",
2840
4333
  response: responseData,
2841
- usage: calculateLanguageModelUsage(result2.usage),
4334
+ usage: calculateLanguageModelUsage2(result2.usage),
2842
4335
  finishReason: result2.finishReason
2843
4336
  });
2844
4337
  }
@@ -2870,7 +4363,6 @@ async function generateObject({
2870
4363
  finishReason = generateResult.finishReason;
2871
4364
  usage = generateResult.usage;
2872
4365
  warnings = generateResult.warnings;
2873
- rawResponse = generateResult.rawResponse;
2874
4366
  logprobs = generateResult.logprobs;
2875
4367
  resultProviderMetadata = generateResult.providerMetadata;
2876
4368
  request = (_b = generateResult.request) != null ? _b : {};
@@ -2920,7 +4412,7 @@ async function generateObject({
2920
4412
  }),
2921
4413
  tracer,
2922
4414
  fn: async (span2) => {
2923
- var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
4415
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h, _i, _j;
2924
4416
  const result2 = await model.doGenerate({
2925
4417
  tools: [
2926
4418
  {
@@ -2942,13 +4434,15 @@ async function generateObject({
2942
4434
  const responseData = {
2943
4435
  id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),
2944
4436
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
2945
- modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId
4437
+ modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId,
4438
+ headers: (_i = result2.response) == null ? void 0 : _i.headers,
4439
+ body: (_j = result2.response) == null ? void 0 : _j.body
2946
4440
  };
2947
4441
  if (objectText === void 0) {
2948
4442
  throw new NoObjectGeneratedError({
2949
4443
  message: "No object generated: the tool was not called.",
2950
4444
  response: responseData,
2951
- usage: calculateLanguageModelUsage(result2.usage),
4445
+ usage: calculateLanguageModelUsage2(result2.usage),
2952
4446
  finishReason: result2.finishReason
2953
4447
  });
2954
4448
  }
@@ -2980,7 +4474,6 @@ async function generateObject({
2980
4474
  finishReason = generateResult.finishReason;
2981
4475
  usage = generateResult.usage;
2982
4476
  warnings = generateResult.warnings;
2983
- rawResponse = generateResult.rawResponse;
2984
4477
  logprobs = generateResult.logprobs;
2985
4478
  resultProviderMetadata = generateResult.providerMetadata;
2986
4479
  request = (_d = generateResult.request) != null ? _d : {};
@@ -2998,14 +4491,14 @@ async function generateObject({
2998
4491
  }
2999
4492
  }
3000
4493
  function processResult(result2) {
3001
- const parseResult = (0, import_provider_utils7.safeParseJSON)({ text: result2 });
4494
+ const parseResult = (0, import_provider_utils12.safeParseJSON)({ text: result2 });
3002
4495
  if (!parseResult.success) {
3003
4496
  throw new NoObjectGeneratedError({
3004
4497
  message: "No object generated: could not parse the response.",
3005
4498
  cause: parseResult.error,
3006
4499
  text: result2,
3007
4500
  response,
3008
- usage: calculateLanguageModelUsage(usage),
4501
+ usage: calculateLanguageModelUsage2(usage),
3009
4502
  finishReason
3010
4503
  });
3011
4504
  }
@@ -3014,7 +4507,7 @@ async function generateObject({
3014
4507
  {
3015
4508
  text: result2,
3016
4509
  response,
3017
- usage: calculateLanguageModelUsage(usage)
4510
+ usage: calculateLanguageModelUsage2(usage)
3018
4511
  }
3019
4512
  );
3020
4513
  if (!validationResult.success) {
@@ -3023,7 +4516,7 @@ async function generateObject({
3023
4516
  cause: validationResult.error,
3024
4517
  text: result2,
3025
4518
  response,
3026
- usage: calculateLanguageModelUsage(usage),
4519
+ usage: calculateLanguageModelUsage2(usage),
3027
4520
  finishReason
3028
4521
  });
3029
4522
  }
@@ -3062,14 +4555,10 @@ async function generateObject({
3062
4555
  return new DefaultGenerateObjectResult({
3063
4556
  object: object2,
3064
4557
  finishReason,
3065
- usage: calculateLanguageModelUsage(usage),
4558
+ usage: calculateLanguageModelUsage2(usage),
3066
4559
  warnings,
3067
4560
  request,
3068
- response: {
3069
- ...response,
3070
- headers: rawResponse == null ? void 0 : rawResponse.headers,
3071
- body: rawResponse == null ? void 0 : rawResponse.body
3072
- },
4561
+ response,
3073
4562
  logprobs,
3074
4563
  providerMetadata: resultProviderMetadata
3075
4564
  });
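
The `generateObject` hunks above drop the separate `rawResponse` bookkeeping: provider `headers` and `body` are attached to the response metadata when it is built, and the result returns that `response` object directly. A rough usage sketch; `generateObject`, its `model`/`schema`/`prompt` options, and the `response` field follow this diff, while the provider, model id, and prompt are assumptions:

    import { generateObject } from 'ai';
    import { openai } from '@ai-sdk/openai'; // assumed example provider (version must match the canary)
    import { z } from 'zod';

    const { object, response } = await generateObject({
      model: openai('gpt-4o-mini'),
      schema: z.object({ title: z.string() }),
      prompt: 'Suggest a title for a changelog entry.',
    });

    console.log(object.title);
    // Per this diff, provider headers/body now ride on the response metadata directly
    // (previously merged in from a separate rawResponse object):
    console.log(response.headers);
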
@@ -3100,8 +4589,7 @@ var DefaultGenerateObjectResult = class {
3100
4589
  };
3101
4590
 
3102
4591
  // core/generate-object/stream-object.ts
3103
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
3104
- var import_ui_utils3 = require("@ai-sdk/ui-utils");
4592
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
3105
4593
 
3106
4594
  // util/delayed-promise.ts
3107
4595
  var DelayedPromise = class {
@@ -3245,7 +4733,7 @@ function now() {
3245
4733
  }
3246
4734
 
3247
4735
  // core/generate-object/stream-object.ts
3248
- var originalGenerateId2 = (0, import_provider_utils8.createIdGenerator)({ prefix: "aiobj", size: 24 });
4736
+ var originalGenerateId2 = (0, import_provider_utils13.createIdGenerator)({ prefix: "aiobj", size: 24 });
3249
4737
  function streamObject({
3250
4738
  model,
3251
4739
  schema: inputSchema,
@@ -3485,7 +4973,7 @@ var DefaultStreamObjectResult = class {
3485
4973
  }
3486
4974
  }
3487
4975
  const {
3488
- result: { stream, warnings, rawResponse, request },
4976
+ result: { stream, warnings, response, request },
3489
4977
  doStreamSpan,
3490
4978
  startTimestampMs
3491
4979
  } = await retry(
@@ -3534,7 +5022,7 @@ var DefaultStreamObjectResult = class {
3534
5022
  let error;
3535
5023
  let accumulatedText = "";
3536
5024
  let textDelta = "";
3537
- let response = {
5025
+ let fullResponse = {
3538
5026
  id: generateId3(),
3539
5027
  timestamp: currentDate(),
3540
5028
  modelId: model.modelId
@@ -3560,8 +5048,8 @@ var DefaultStreamObjectResult = class {
3560
5048
  if (typeof chunk === "string") {
3561
5049
  accumulatedText += chunk;
3562
5050
  textDelta += chunk;
3563
- const { value: currentObjectJson, state: parseState } = (0, import_ui_utils3.parsePartialJson)(accumulatedText);
3564
- if (currentObjectJson !== void 0 && !(0, import_ui_utils3.isDeepEqualData)(latestObjectJson, currentObjectJson)) {
5051
+ const { value: currentObjectJson, state: parseState } = parsePartialJson(accumulatedText);
5052
+ if (currentObjectJson !== void 0 && !isDeepEqualData(latestObjectJson, currentObjectJson)) {
3565
5053
  const validationResult = outputStrategy.validatePartialResult({
3566
5054
  value: currentObjectJson,
3567
5055
  textDelta,
@@ -3569,7 +5057,7 @@ var DefaultStreamObjectResult = class {
3569
5057
  isFirstDelta,
3570
5058
  isFinalDelta: parseState === "successful-parse"
3571
5059
  });
3572
- if (validationResult.success && !(0, import_ui_utils3.isDeepEqualData)(
5060
+ if (validationResult.success && !isDeepEqualData(
3573
5061
  latestObject,
3574
5062
  validationResult.value.partial
3575
5063
  )) {
@@ -3591,10 +5079,10 @@ var DefaultStreamObjectResult = class {
3591
5079
  }
3592
5080
  switch (chunk.type) {
3593
5081
  case "response-metadata": {
3594
- response = {
3595
- id: (_a18 = chunk.id) != null ? _a18 : response.id,
3596
- timestamp: (_b2 = chunk.timestamp) != null ? _b2 : response.timestamp,
3597
- modelId: (_c = chunk.modelId) != null ? _c : response.modelId
5082
+ fullResponse = {
5083
+ id: (_a18 = chunk.id) != null ? _a18 : fullResponse.id,
5084
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : fullResponse.timestamp,
5085
+ modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
3598
5086
  };
3599
5087
  break;
3600
5088
  }
@@ -3603,20 +5091,24 @@ var DefaultStreamObjectResult = class {
3603
5091
  controller.enqueue({ type: "text-delta", textDelta });
3604
5092
  }
3605
5093
  finishReason = chunk.finishReason;
3606
- usage = calculateLanguageModelUsage(chunk.usage);
5094
+ usage = calculateLanguageModelUsage2(chunk.usage);
3607
5095
  providerMetadata = chunk.providerMetadata;
3608
- controller.enqueue({ ...chunk, usage, response });
5096
+ controller.enqueue({
5097
+ ...chunk,
5098
+ usage,
5099
+ response: fullResponse
5100
+ });
3609
5101
  self.usagePromise.resolve(usage);
3610
5102
  self.providerMetadataPromise.resolve(providerMetadata);
3611
5103
  self.responsePromise.resolve({
3612
- ...response,
3613
- headers: rawResponse == null ? void 0 : rawResponse.headers
5104
+ ...fullResponse,
5105
+ headers: response == null ? void 0 : response.headers
3614
5106
  });
3615
5107
  const validationResult = outputStrategy.validateFinalResult(
3616
5108
  latestObjectJson,
3617
5109
  {
3618
5110
  text: accumulatedText,
3619
- response,
5111
+ response: fullResponse,
3620
5112
  usage
3621
5113
  }
3622
5114
  );
@@ -3628,7 +5120,7 @@ var DefaultStreamObjectResult = class {
3628
5120
  message: "No object generated: response did not match schema.",
3629
5121
  cause: validationResult.error,
3630
5122
  text: accumulatedText,
3631
- response,
5123
+ response: fullResponse,
3632
5124
  usage,
3633
5125
  finishReason
3634
5126
  });
@@ -3658,15 +5150,15 @@ var DefaultStreamObjectResult = class {
3658
5150
  "ai.response.object": {
3659
5151
  output: () => JSON.stringify(object2)
3660
5152
  },
3661
- "ai.response.id": response.id,
3662
- "ai.response.model": response.modelId,
3663
- "ai.response.timestamp": response.timestamp.toISOString(),
5153
+ "ai.response.id": fullResponse.id,
5154
+ "ai.response.model": fullResponse.modelId,
5155
+ "ai.response.timestamp": fullResponse.timestamp.toISOString(),
3664
5156
  "ai.usage.promptTokens": finalUsage.promptTokens,
3665
5157
  "ai.usage.completionTokens": finalUsage.completionTokens,
3666
5158
  // standardized gen-ai llm span attributes:
3667
5159
  "gen_ai.response.finish_reasons": [finishReason],
3668
- "gen_ai.response.id": response.id,
3669
- "gen_ai.response.model": response.modelId,
5160
+ "gen_ai.response.id": fullResponse.id,
5161
+ "gen_ai.response.model": fullResponse.modelId,
3670
5162
  "gen_ai.usage.input_tokens": finalUsage.promptTokens,
3671
5163
  "gen_ai.usage.output_tokens": finalUsage.completionTokens
3672
5164
  }
@@ -3690,8 +5182,8 @@ var DefaultStreamObjectResult = class {
3690
5182
  object: object2,
3691
5183
  error,
3692
5184
  response: {
3693
- ...response,
3694
- headers: rawResponse == null ? void 0 : rawResponse.headers
5185
+ ...fullResponse,
5186
+ headers: response == null ? void 0 : response.headers
3695
5187
  },
3696
5188
  warnings,
3697
5189
  providerMetadata,
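
The same reshaping applies to `streamObject`: the provider stream result now exposes `response` (with `headers`) instead of `rawResponse`, and the internally tracked metadata is renamed to `fullResponse` to avoid the clash. A consumption sketch; `partialObjectStream` and the awaitable `response` are the result fields as I understand the public API and are not shown verbatim in these hunks, and the provider/prompt are placeholders:

    import { streamObject } from 'ai';
    import { openai } from '@ai-sdk/openai'; // assumed example provider
    import { z } from 'zod';

    const result = streamObject({
      model: openai('gpt-4o-mini'),
      schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
      prompt: 'Describe this release.',
    });

    for await (const partial of result.partialObjectStream) {
      console.log(partial); // progressively repaired partial objects (via parsePartialJson above)
    }
    console.log((await result.response).headers); // provider headers, resolved when the stream finishes
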
@@ -3817,7 +5309,7 @@ var DefaultStreamObjectResult = class {
3817
5309
  };
3818
5310
 
3819
5311
  // core/generate-text/generate-text.ts
3820
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
5312
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
3821
5313
 
3822
5314
  // errors/no-output-specified-error.ts
3823
5315
  var import_provider13 = require("@ai-sdk/provider");
@@ -3863,9 +5355,6 @@ var ToolExecutionError = class extends import_provider14.AISDKError {
3863
5355
  };
3864
5356
  _a10 = symbol10;
3865
5357
 
3866
- // core/prompt/prepare-tools-and-tool-choice.ts
3867
- var import_ui_utils4 = require("@ai-sdk/ui-utils");
3868
-
3869
5358
  // core/util/is-non-empty-object.ts
3870
5359
  function isNonEmptyObject(object2) {
3871
5360
  return object2 != null && Object.keys(object2).length > 0;
@@ -3896,7 +5385,7 @@ function prepareToolsAndToolChoice({
3896
5385
  type: "function",
3897
5386
  name: name17,
3898
5387
  description: tool2.description,
3899
- parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
5388
+ parameters: asSchema(tool2.parameters).jsonSchema
3900
5389
  };
3901
5390
  case "provider-defined":
3902
5391
  return {
@@ -3929,8 +5418,7 @@ function removeTextAfterLastWhitespace(text2) {
3929
5418
  }
3930
5419
 
3931
5420
  // core/generate-text/parse-tool-call.ts
3932
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
3933
- var import_ui_utils5 = require("@ai-sdk/ui-utils");
5421
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
3934
5422
 
3935
5423
  // errors/invalid-tool-arguments-error.ts
3936
5424
  var import_provider15 = require("@ai-sdk/provider");
@@ -4025,7 +5513,7 @@ async function parseToolCall({
4025
5513
  repairedToolCall = await repairToolCall({
4026
5514
  toolCall,
4027
5515
  tools,
4028
- parameterSchema: ({ toolName }) => (0, import_ui_utils5.asSchema)(tools[toolName].parameters).jsonSchema,
5516
+ parameterSchema: ({ toolName }) => asSchema(tools[toolName].parameters).jsonSchema,
4029
5517
  system,
4030
5518
  messages,
4031
5519
  error
@@ -4054,8 +5542,8 @@ async function doParseToolCall({
4054
5542
  availableTools: Object.keys(tools)
4055
5543
  });
4056
5544
  }
4057
- const schema = (0, import_ui_utils5.asSchema)(tool2.parameters);
4058
- const parseResult = toolCall.args.trim() === "" ? (0, import_provider_utils9.safeValidateTypes)({ value: {}, schema }) : (0, import_provider_utils9.safeParseJSON)({ text: toolCall.args, schema });
5545
+ const schema = asSchema(tool2.parameters);
5546
+ const parseResult = toolCall.args.trim() === "" ? (0, import_provider_utils14.safeValidateTypes)({ value: {}, schema }) : (0, import_provider_utils14.safeParseJSON)({ text: toolCall.args, schema });
4059
5547
  if (parseResult.success === false) {
4060
5548
  throw new InvalidToolArgumentsError({
4061
5549
  toolName,
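
`parseToolCall` now validates tool arguments with the locally bundled `asSchema(...)` instead of the `@ai-sdk/ui-utils` copy. For context, this is the shape of a tool whose `parameters` flow through that validation path; `tool()`, `description`, `parameters`, and `execute` all appear in this diff, while the weather example itself is made up:

    import { tool } from 'ai';
    import { z } from 'zod';

    const weather = tool({
      description: 'Get the current temperature for a city',
      parameters: z.object({ city: z.string() }),
      // execute receives arguments that passed the asSchema / safeParseJSON checks above
      execute: async ({ city }) => ({ city, temperatureC: 21 }),
    });
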
@@ -4133,11 +5621,11 @@ function toResponseMessages({
4133
5621
  }
4134
5622
 
4135
5623
  // core/generate-text/generate-text.ts
4136
- var originalGenerateId3 = (0, import_provider_utils10.createIdGenerator)({
5624
+ var originalGenerateId3 = (0, import_provider_utils15.createIdGenerator)({
4137
5625
  prefix: "aitxt",
4138
5626
  size: 24
4139
5627
  });
4140
- var originalGenerateMessageId = (0, import_provider_utils10.createIdGenerator)({
5628
+ var originalGenerateMessageId = (0, import_provider_utils15.createIdGenerator)({
4141
5629
  prefix: "msg",
4142
5630
  size: 24
4143
5631
  });
@@ -4210,7 +5698,7 @@ async function generateText({
4210
5698
  }),
4211
5699
  tracer,
4212
5700
  fn: async (span) => {
4213
- var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
5701
+ var _a18, _b, _c, _d, _e, _f, _g;
4214
5702
  const toolsAndToolChoice = {
4215
5703
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
4216
5704
  };
@@ -4285,7 +5773,7 @@ async function generateText({
4285
5773
  }),
4286
5774
  tracer,
4287
5775
  fn: async (span2) => {
4288
- var _a19, _b2, _c2, _d2, _e2, _f2;
5776
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
4289
5777
  const result = await model.doGenerate({
4290
5778
  ...callSettings,
4291
5779
  ...toolsAndToolChoice,
@@ -4299,7 +5787,9 @@ async function generateText({
4299
5787
  const responseData = {
4300
5788
  id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
4301
5789
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
4302
- modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
5790
+ modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId,
5791
+ headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
5792
+ body: (_h = result.response) == null ? void 0 : _h.body
4303
5793
  };
4304
5794
  span2.setAttributes(
4305
5795
  selectTelemetryAttributes({
@@ -4349,7 +5839,7 @@ async function generateText({
4349
5839
  messages: stepInputMessages,
4350
5840
  abortSignal
4351
5841
  });
4352
- const currentUsage = calculateLanguageModelUsage(
5842
+ const currentUsage = calculateLanguageModelUsage2(
4353
5843
  currentModelResponse.usage
4354
5844
  );
4355
5845
  usage = addLanguageModelUsage(usage, currentUsage);
@@ -4416,8 +5906,6 @@ async function generateText({
4416
5906
  request: (_f = currentModelResponse.request) != null ? _f : {},
4417
5907
  response: {
4418
5908
  ...currentModelResponse.response,
4419
- headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
4420
- body: (_h = currentModelResponse.rawResponse) == null ? void 0 : _h.body,
4421
5909
  // deep clone msgs to avoid mutating past messages in multi-step:
4422
5910
  messages: structuredClone(responseMessages)
4423
5911
  },
@@ -4469,11 +5957,9 @@ async function generateText({
4469
5957
  finishReason: currentModelResponse.finishReason,
4470
5958
  usage,
4471
5959
  warnings: currentModelResponse.warnings,
4472
- request: (_i = currentModelResponse.request) != null ? _i : {},
5960
+ request: (_g = currentModelResponse.request) != null ? _g : {},
4473
5961
  response: {
4474
5962
  ...currentModelResponse.response,
4475
- headers: (_j = currentModelResponse.rawResponse) == null ? void 0 : _j.headers,
4476
- body: (_k = currentModelResponse.rawResponse) == null ? void 0 : _k.body,
4477
5963
  messages: responseMessages
4478
5964
  },
4479
5965
  logprobs: currentModelResponse.logprobs,
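
As with `generateObject`, the `generateText` hunks remove the `rawResponse` merge: `headers` and `body` arrive on each step's `response` metadata, and the final result keeps `response.messages` (deep-cloned per step). A sketch, with the provider, model id, and prompt as assumptions:

    import { generateText } from 'ai';
    import { openai } from '@ai-sdk/openai'; // assumed example provider

    const { text, response } = await generateText({
      model: openai('gpt-4o-mini'),
      prompt: 'Summarize the 5.0.0-canary.6 changes in one sentence.',
    });

    console.log(text);
    console.log(response.headers);  // provider response headers, per this diff
    console.log(response.messages); // assistant/tool messages produced during the call
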
@@ -4602,8 +6088,7 @@ __export(output_exports, {
4602
6088
  object: () => object,
4603
6089
  text: () => text
4604
6090
  });
4605
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
4606
- var import_ui_utils6 = require("@ai-sdk/ui-utils");
6091
+ var import_provider_utils16 = require("@ai-sdk/provider-utils");
4607
6092
 
4608
6093
  // errors/index.ts
4609
6094
  var import_provider20 = require("@ai-sdk/provider");
@@ -4667,7 +6152,7 @@ var text = () => ({
4667
6152
  var object = ({
4668
6153
  schema: inputSchema
4669
6154
  }) => {
4670
- const schema = (0, import_ui_utils6.asSchema)(inputSchema);
6155
+ const schema = asSchema(inputSchema);
4671
6156
  return {
4672
6157
  type: "object",
4673
6158
  responseFormat: ({ model }) => ({
@@ -4681,7 +6166,7 @@ var object = ({
4681
6166
  });
4682
6167
  },
4683
6168
  parsePartial({ text: text2 }) {
4684
- const result = (0, import_ui_utils6.parsePartialJson)(text2);
6169
+ const result = parsePartialJson(text2);
4685
6170
  switch (result.state) {
4686
6171
  case "failed-parse":
4687
6172
  case "undefined-input":
@@ -4699,7 +6184,7 @@ var object = ({
4699
6184
  }
4700
6185
  },
4701
6186
  parseOutput({ text: text2 }, context) {
4702
- const parseResult = (0, import_provider_utils11.safeParseJSON)({ text: text2 });
6187
+ const parseResult = (0, import_provider_utils16.safeParseJSON)({ text: text2 });
4703
6188
  if (!parseResult.success) {
4704
6189
  throw new NoObjectGeneratedError({
4705
6190
  message: "No object generated: could not parse the response.",
@@ -4710,7 +6195,7 @@ var object = ({
4710
6195
  finishReason: context.finishReason
4711
6196
  });
4712
6197
  }
4713
- const validationResult = (0, import_provider_utils11.safeValidateTypes)({
6198
+ const validationResult = (0, import_provider_utils16.safeValidateTypes)({
4714
6199
  value: parseResult.value,
4715
6200
  schema
4716
6201
  });
@@ -4730,7 +6215,7 @@ var object = ({
4730
6215
  };
4731
6216
 
4732
6217
  // core/generate-text/smooth-stream.ts
4733
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
6218
+ var import_provider_utils17 = require("@ai-sdk/provider-utils");
4734
6219
  var import_provider21 = require("@ai-sdk/provider");
4735
6220
  var CHUNKING_REGEXPS = {
4736
6221
  word: /\S+\s+/m,
@@ -4739,7 +6224,7 @@ var CHUNKING_REGEXPS = {
4739
6224
  function smoothStream({
4740
6225
  delayInMs = 10,
4741
6226
  chunking = "word",
4742
- _internal: { delay: delay2 = import_provider_utils12.delay } = {}
6227
+ _internal: { delay: delay2 = import_provider_utils17.delay } = {}
4743
6228
  } = {}) {
4744
6229
  let detectChunk;
4745
6230
  if (typeof chunking === "function") {
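
`smoothStream` keeps its options as shown (`delayInMs`, defaulting to 10 ms, and `chunking`, defaulting to `"word"`); only the internal `delay` import is renumbered. A usage sketch; wiring it into `streamText` via `experimental_transform`, and reading `textStream`, reflect how stream transforms are documented elsewhere and are assumptions, not something shown in this hunk:

    import { smoothStream, streamText } from 'ai';
    import { openai } from '@ai-sdk/openai'; // assumed example provider

    const result = streamText({
      model: openai('gpt-4o-mini'),
      prompt: 'Write a short haiku about changelogs.',
      // assumed option name; smoothStream({ delayInMs, chunking }) matches the hunk above
      experimental_transform: smoothStream({ delayInMs: 10, chunking: 'word' }),
    });

    for await (const delta of result.textStream) {
      process.stdout.write(delta); // deltas arrive word by word with the configured delay
    }
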
@@ -4800,8 +6285,7 @@ function smoothStream({
4800
6285
 
4801
6286
  // core/generate-text/stream-text.ts
4802
6287
  var import_provider22 = require("@ai-sdk/provider");
4803
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
4804
- var import_ui_utils8 = require("@ai-sdk/ui-utils");
6288
+ var import_provider_utils18 = require("@ai-sdk/provider-utils");
4805
6289
 
4806
6290
  // util/as-array.ts
4807
6291
  function asArray(value) {
@@ -4916,7 +6400,6 @@ function mergeStreams(stream1, stream2) {
4916
6400
  }
4917
6401
 
4918
6402
  // core/generate-text/run-tools-transformation.ts
4919
- var import_ui_utils7 = require("@ai-sdk/ui-utils");
4920
6403
  function runToolsTransformation({
4921
6404
  tools,
4922
6405
  generatorStream,
@@ -5000,7 +6483,7 @@ function runToolsTransformation({
5000
6483
  controller.enqueue(toolCall);
5001
6484
  const tool2 = tools[toolCall.toolName];
5002
6485
  if (tool2.execute != null) {
5003
- const toolExecutionId = (0, import_ui_utils7.generateId)();
6486
+ const toolExecutionId = (0, import_provider_utils5.generateId)();
5004
6487
  outstandingToolResults.add(toolExecutionId);
5005
6488
  recordSpan({
5006
6489
  name: "ai.toolCall",
@@ -5075,7 +6558,7 @@ function runToolsTransformation({
5075
6558
  type: "finish",
5076
6559
  finishReason: chunk.finishReason,
5077
6560
  logprobs: chunk.logprobs,
5078
- usage: calculateLanguageModelUsage(chunk.usage),
6561
+ usage: calculateLanguageModelUsage2(chunk.usage),
5079
6562
  experimental_providerMetadata: chunk.providerMetadata
5080
6563
  };
5081
6564
  break;
@@ -5119,11 +6602,11 @@ function runToolsTransformation({
5119
6602
  }
5120
6603
 
5121
6604
  // core/generate-text/stream-text.ts
5122
- var originalGenerateId4 = (0, import_provider_utils13.createIdGenerator)({
6605
+ var originalGenerateId4 = (0, import_provider_utils18.createIdGenerator)({
5123
6606
  prefix: "aitxt",
5124
6607
  size: 24
5125
6608
  });
5126
- var originalGenerateMessageId2 = (0, import_provider_utils13.createIdGenerator)({
6609
+ var originalGenerateMessageId2 = (0, import_provider_utils18.createIdGenerator)({
5127
6610
  prefix: "msg",
5128
6611
  size: 24
5129
6612
  });
@@ -5586,7 +7069,7 @@ var DefaultStreamTextResult = class {
5586
7069
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
5587
7070
  };
5588
7071
  const {
5589
- result: { stream: stream2, warnings, rawResponse, request },
7072
+ result: { stream: stream2, warnings, response, request },
5590
7073
  doStreamSpan,
5591
7074
  startTimestampMs
5592
7075
  } = await retry(
@@ -5897,7 +7380,7 @@ var DefaultStreamTextResult = class {
5897
7380
  request: stepRequest,
5898
7381
  response: {
5899
7382
  ...stepResponse,
5900
- headers: rawResponse == null ? void 0 : rawResponse.headers
7383
+ headers: response == null ? void 0 : response.headers
5901
7384
  },
5902
7385
  warnings,
5903
7386
  isContinued: nextStepType === "continue",
@@ -5914,7 +7397,7 @@ var DefaultStreamTextResult = class {
5914
7397
  logprobs: stepLogProbs,
5915
7398
  response: {
5916
7399
  ...stepResponse,
5917
- headers: rawResponse == null ? void 0 : rawResponse.headers
7400
+ headers: response == null ? void 0 : response.headers
5918
7401
  }
5919
7402
  });
5920
7403
  self.closeStream();
@@ -6110,13 +7593,13 @@ var DefaultStreamTextResult = class {
6110
7593
  const chunkType = chunk.type;
6111
7594
  switch (chunkType) {
6112
7595
  case "text-delta": {
6113
- controller.enqueue((0, import_ui_utils8.formatDataStreamPart)("text", chunk.textDelta));
7596
+ controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
6114
7597
  break;
6115
7598
  }
6116
7599
  case "reasoning": {
6117
7600
  if (sendReasoning) {
6118
7601
  controller.enqueue(
6119
- (0, import_ui_utils8.formatDataStreamPart)("reasoning", chunk.textDelta)
7602
+ formatDataStreamPart("reasoning", chunk.textDelta)
6120
7603
  );
6121
7604
  }
6122
7605
  break;
@@ -6124,7 +7607,7 @@ var DefaultStreamTextResult = class {
6124
7607
  case "redacted-reasoning": {
6125
7608
  if (sendReasoning) {
6126
7609
  controller.enqueue(
6127
- (0, import_ui_utils8.formatDataStreamPart)("redacted_reasoning", {
7610
+ formatDataStreamPart("redacted_reasoning", {
6128
7611
  data: chunk.data
6129
7612
  })
6130
7613
  );
@@ -6134,7 +7617,7 @@ var DefaultStreamTextResult = class {
6134
7617
  case "reasoning-signature": {
6135
7618
  if (sendReasoning) {
6136
7619
  controller.enqueue(
6137
- (0, import_ui_utils8.formatDataStreamPart)("reasoning_signature", {
7620
+ formatDataStreamPart("reasoning_signature", {
6138
7621
  signature: chunk.signature
6139
7622
  })
6140
7623
  );
@@ -6143,7 +7626,7 @@ var DefaultStreamTextResult = class {
6143
7626
  }
6144
7627
  case "file": {
6145
7628
  controller.enqueue(
6146
- (0, import_ui_utils8.formatDataStreamPart)("file", {
7629
+ formatDataStreamPart("file", {
6147
7630
  mimeType: chunk.mediaType,
6148
7631
  data: chunk.base64
6149
7632
  })
@@ -6153,14 +7636,14 @@ var DefaultStreamTextResult = class {
6153
7636
  case "source": {
6154
7637
  if (sendSources) {
6155
7638
  controller.enqueue(
6156
- (0, import_ui_utils8.formatDataStreamPart)("source", chunk.source)
7639
+ formatDataStreamPart("source", chunk.source)
6157
7640
  );
6158
7641
  }
6159
7642
  break;
6160
7643
  }
6161
7644
  case "tool-call-streaming-start": {
6162
7645
  controller.enqueue(
6163
- (0, import_ui_utils8.formatDataStreamPart)("tool_call_streaming_start", {
7646
+ formatDataStreamPart("tool_call_streaming_start", {
6164
7647
  toolCallId: chunk.toolCallId,
6165
7648
  toolName: chunk.toolName
6166
7649
  })
@@ -6169,7 +7652,7 @@ var DefaultStreamTextResult = class {
6169
7652
  }
6170
7653
  case "tool-call-delta": {
6171
7654
  controller.enqueue(
6172
- (0, import_ui_utils8.formatDataStreamPart)("tool_call_delta", {
7655
+ formatDataStreamPart("tool_call_delta", {
6173
7656
  toolCallId: chunk.toolCallId,
6174
7657
  argsTextDelta: chunk.argsTextDelta
6175
7658
  })
@@ -6178,7 +7661,7 @@ var DefaultStreamTextResult = class {
6178
7661
  }
6179
7662
  case "tool-call": {
6180
7663
  controller.enqueue(
6181
- (0, import_ui_utils8.formatDataStreamPart)("tool_call", {
7664
+ formatDataStreamPart("tool_call", {
6182
7665
  toolCallId: chunk.toolCallId,
6183
7666
  toolName: chunk.toolName,
6184
7667
  args: chunk.args
@@ -6188,7 +7671,7 @@ var DefaultStreamTextResult = class {
6188
7671
  }
6189
7672
  case "tool-result": {
6190
7673
  controller.enqueue(
6191
- (0, import_ui_utils8.formatDataStreamPart)("tool_result", {
7674
+ formatDataStreamPart("tool_result", {
6192
7675
  toolCallId: chunk.toolCallId,
6193
7676
  result: chunk.result
6194
7677
  })
@@ -6197,13 +7680,13 @@ var DefaultStreamTextResult = class {
6197
7680
  }
6198
7681
  case "error": {
6199
7682
  controller.enqueue(
6200
- (0, import_ui_utils8.formatDataStreamPart)("error", getErrorMessage5(chunk.error))
7683
+ formatDataStreamPart("error", getErrorMessage5(chunk.error))
6201
7684
  );
6202
7685
  break;
6203
7686
  }
6204
7687
  case "step-start": {
6205
7688
  controller.enqueue(
6206
- (0, import_ui_utils8.formatDataStreamPart)("start_step", {
7689
+ formatDataStreamPart("start_step", {
6207
7690
  messageId: chunk.messageId
6208
7691
  })
6209
7692
  );
@@ -6211,7 +7694,7 @@ var DefaultStreamTextResult = class {
6211
7694
  }
6212
7695
  case "step-finish": {
6213
7696
  controller.enqueue(
6214
- (0, import_ui_utils8.formatDataStreamPart)("finish_step", {
7697
+ formatDataStreamPart("finish_step", {
6215
7698
  finishReason: chunk.finishReason,
6216
7699
  usage: sendUsage ? {
6217
7700
  promptTokens: chunk.usage.promptTokens,
@@ -6225,7 +7708,7 @@ var DefaultStreamTextResult = class {
6225
7708
  case "finish": {
6226
7709
  if (experimental_sendFinish) {
6227
7710
  controller.enqueue(
6228
- (0, import_ui_utils8.formatDataStreamPart)("finish_message", {
7711
+ formatDataStreamPart("finish_message", {
6229
7712
  finishReason: chunk.finishReason,
6230
7713
  usage: sendUsage ? {
6231
7714
  promptTokens: chunk.usage.promptTokens,
@@ -6653,8 +8136,8 @@ function simulateStreamingMiddleware() {
6653
8136
  });
6654
8137
  return {
6655
8138
  stream: simulatedStream,
6656
- rawCall: result.rawCall,
6657
- rawResponse: result.rawResponse,
8139
+ request: result.request,
8140
+ response: result.response,
6658
8141
  warnings: result.warnings
6659
8142
  };
6660
8143
  }
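
`simulateStreamingMiddleware` now forwards `request`/`response` from the underlying `doGenerate` call instead of the old `rawCall`/`rawResponse` pair, matching the response reshaping elsewhere in this diff. A sketch of how such a middleware is typically attached; `wrapLanguageModel({ model, middleware })` is the assumed wrapper signature (both names are exported by this bundle), and the provider is a placeholder:

    import { simulateStreamingMiddleware, wrapLanguageModel } from 'ai';
    import { openai } from '@ai-sdk/openai'; // assumed example provider

    // Wraps a generate-only model so callers can still consume it as a stream:
    // the middleware buffers the doGenerate result and replays it as stream parts.
    const streamingModel = wrapLanguageModel({
      model: openai('gpt-4o-mini'),
      middleware: simulateStreamingMiddleware(),
    });
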
@@ -6726,7 +8209,6 @@ function appendClientMessage({
6726
8209
  }
6727
8210
 
6728
8211
  // core/prompt/append-response-messages.ts
6729
- var import_ui_utils9 = require("@ai-sdk/ui-utils");
6730
8212
  var import_provider24 = require("@ai-sdk/provider");
6731
8213
  function appendResponseMessages({
6732
8214
  messages,
@@ -6825,7 +8307,7 @@ function appendResponseMessages({
6825
8307
  }
6826
8308
  }
6827
8309
  if (isLastMessageAssistant) {
6828
- const maxStep = (0, import_ui_utils9.extractMaxToolInvocationStep)(
8310
+ const maxStep = extractMaxToolInvocationStep(
6829
8311
  lastMessage.toolInvocations
6830
8312
  );
6831
8313
  (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
@@ -7050,16 +8532,13 @@ var DefaultProviderRegistry = class {
7050
8532
  }
7051
8533
  };
7052
8534
 
7053
- // core/tool/mcp/mcp-client.ts
7054
- var import_ui_utils10 = require("@ai-sdk/ui-utils");
7055
-
7056
8535
  // core/tool/tool.ts
7057
8536
  function tool(tool2) {
7058
8537
  return tool2;
7059
8538
  }
7060
8539
 
7061
8540
  // core/tool/mcp/mcp-sse-transport.ts
7062
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
8541
+ var import_provider_utils19 = require("@ai-sdk/provider-utils");
7063
8542
 
7064
8543
  // core/tool/mcp/json-rpc-message.ts
7065
8544
  var import_zod9 = require("zod");
@@ -7230,7 +8709,7 @@ var SseMCPTransport = class {
7230
8709
  (_b = this.onerror) == null ? void 0 : _b.call(this, error);
7231
8710
  return reject(error);
7232
8711
  }
7233
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils14.createEventSourceParserStream)());
8712
+ const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils19.createEventSourceParserStream)());
7234
8713
  const reader = stream.getReader();
7235
8714
  const processEvents = async () => {
7236
8715
  var _a18, _b2, _c2;
@@ -7544,7 +9023,7 @@ var MCPClient = class {
7544
9023
  if (schemas !== "automatic" && !(name17 in schemas)) {
7545
9024
  continue;
7546
9025
  }
7547
- const parameters = schemas === "automatic" ? (0, import_ui_utils10.jsonSchema)(inputSchema) : schemas[name17].parameters;
9026
+ const parameters = schemas === "automatic" ? jsonSchema(inputSchema) : schemas[name17].parameters;
7548
9027
  const self = this;
7549
9028
  const toolWithExecute = tool({
7550
9029
  description,
@@ -7637,7 +9116,7 @@ function cosineSimilarity(vector1, vector2, options) {
7637
9116
  }
7638
9117
 
7639
9118
  // core/util/simulate-readable-stream.ts
7640
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
9119
+ var import_provider_utils20 = require("@ai-sdk/provider-utils");
7641
9120
  function simulateReadableStream({
7642
9121
  chunks,
7643
9122
  initialDelayInMs = 0,
@@ -7645,7 +9124,7 @@ function simulateReadableStream({
7645
9124
  _internal
7646
9125
  }) {
7647
9126
  var _a17;
7648
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils15.delay;
9127
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils20.delay;
7649
9128
  let index = 0;
7650
9129
  return new ReadableStream({
7651
9130
  async pull(controller) {
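
`simulateReadableStream` likewise only renumbers its `delay` import. The options visible here are `chunks` and `initialDelayInMs`; a minimal sketch using only those:

    import { simulateReadableStream } from 'ai';

    const stream = simulateReadableStream({
      chunks: ['Hello', ', ', 'world', '!'],
      initialDelayInMs: 0,
    });

    const reader = stream.getReader();
    for (let part = await reader.read(); !part.done; part = await reader.read()) {
      console.log(part.value); // chunks are emitted in order, honoring the configured delay
    }
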
@@ -7666,7 +9145,6 @@ __export(langchain_adapter_exports, {
7666
9145
  toDataStream: () => toDataStream,
7667
9146
  toDataStreamResponse: () => toDataStreamResponse
7668
9147
  });
7669
- var import_ui_utils12 = require("@ai-sdk/ui-utils");
7670
9148
 
7671
9149
  // streams/stream-callbacks.ts
7672
9150
  function createCallbacksTransformer(callbacks = {}) {
@@ -7722,7 +9200,7 @@ function toDataStreamInternal(stream, callbacks) {
7722
9200
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7723
9201
  new TransformStream({
7724
9202
  transform: async (chunk, controller) => {
7725
- controller.enqueue((0, import_ui_utils12.formatDataStreamPart)("text", chunk));
9203
+ controller.enqueue(formatDataStreamPart("text", chunk));
7726
9204
  }
7727
9205
  })
7728
9206
  );
@@ -7773,11 +9251,10 @@ __export(llamaindex_adapter_exports, {
7773
9251
  toDataStream: () => toDataStream2,
7774
9252
  toDataStreamResponse: () => toDataStreamResponse2
7775
9253
  });
7776
- var import_provider_utils17 = require("@ai-sdk/provider-utils");
7777
- var import_ui_utils13 = require("@ai-sdk/ui-utils");
9254
+ var import_provider_utils22 = require("@ai-sdk/provider-utils");
7778
9255
  function toDataStreamInternal2(stream, callbacks) {
7779
9256
  const trimStart = trimStartOfStream();
7780
- return (0, import_provider_utils17.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
9257
+ return (0, import_provider_utils22.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
7781
9258
  new TransformStream({
7782
9259
  async transform(message, controller) {
7783
9260
  controller.enqueue(trimStart(message.delta));
@@ -7786,7 +9263,7 @@ function toDataStreamInternal2(stream, callbacks) {
7786
9263
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7787
9264
  new TransformStream({
7788
9265
  transform: async (chunk, controller) => {
7789
- controller.enqueue((0, import_ui_utils13.formatDataStreamPart)("text", chunk));
9266
+ controller.enqueue(formatDataStreamPart("text", chunk));
7790
9267
  }
7791
9268
  })
7792
9269
  );
@@ -7827,9 +9304,6 @@ function trimStartOfStream() {
7827
9304
  };
7828
9305
  }
7829
9306
 
7830
- // streams/stream-data.ts
7831
- var import_ui_utils14 = require("@ai-sdk/ui-utils");
7832
-
7833
9307
  // util/constants.ts
7834
9308
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
7835
9309
 
@@ -7880,7 +9354,7 @@ var StreamData = class {
7880
9354
  throw new Error("Stream controller is not initialized.");
7881
9355
  }
7882
9356
  this.controller.enqueue(
7883
- this.encoder.encode((0, import_ui_utils14.formatDataStreamPart)("data", [value]))
9357
+ this.encoder.encode(formatDataStreamPart("data", [value]))
7884
9358
  );
7885
9359
  }
7886
9360
  appendMessageAnnotation(value) {
@@ -7891,7 +9365,7 @@ var StreamData = class {
7891
9365
  throw new Error("Stream controller is not initialized.");
7892
9366
  }
7893
9367
  this.controller.enqueue(
7894
- this.encoder.encode((0, import_ui_utils14.formatDataStreamPart)("message_annotations", [value]))
9368
+ this.encoder.encode(formatDataStreamPart("message_annotations", [value]))
7895
9369
  );
7896
9370
  }
7897
9371
  };
@@ -7930,6 +9404,9 @@ var StreamData = class {
7930
9404
  UnsupportedFunctionalityError,
7931
9405
  appendClientMessage,
7932
9406
  appendResponseMessages,
9407
+ asSchema,
9408
+ callChatApi,
9409
+ callCompletionApi,
7933
9410
  convertToCoreMessages,
7934
9411
  coreAssistantMessageSchema,
7935
9412
  coreMessageSchema,
@@ -7951,22 +9428,32 @@ var StreamData = class {
7951
9428
  experimental_generateImage,
7952
9429
  experimental_transcribe,
7953
9430
  experimental_wrapLanguageModel,
9431
+ extractMaxToolInvocationStep,
7954
9432
  extractReasoningMiddleware,
9433
+ fillMessageParts,
7955
9434
  formatDataStreamPart,
7956
9435
  generateId,
7957
9436
  generateObject,
7958
9437
  generateText,
9438
+ getMessageParts,
9439
+ getTextFromDataUrl,
9440
+ isAssistantMessageWithCompletedToolCalls,
9441
+ isDeepEqualData,
7959
9442
  jsonSchema,
7960
9443
  parseDataStreamPart,
9444
+ parsePartialJson,
7961
9445
  pipeDataStreamToResponse,
9446
+ prepareAttachmentsForRequest,
7962
9447
  processDataStream,
7963
9448
  processTextStream,
9449
+ shouldResubmitMessages,
7964
9450
  simulateReadableStream,
7965
9451
  simulateStreamingMiddleware,
7966
9452
  smoothStream,
7967
9453
  streamObject,
7968
9454
  streamText,
7969
9455
  tool,
9456
+ updateToolCallResult,
7970
9457
  wrapLanguageModel,
7971
9458
  zodSchema
7972
9459
  });
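
The expanded export list above is the most visible change for consumers: helpers that previously came from `@ai-sdk/ui-utils` (`asSchema`, `parsePartialJson`, `formatDataStreamPart`, `isDeepEqualData`, `extractMaxToolInvocationStep`, and friends) are now re-exported from the main `ai` entry point, matching the import rewrites throughout this bundle. A small sketch using only names present in that list; the return shapes follow the call sites seen earlier in the diff:

    import { asSchema, formatDataStreamPart, isDeepEqualData, parsePartialJson } from 'ai';
    import { z } from 'zod';

    // asSchema(...).jsonSchema is how the bundle derives tool/output JSON schemas above.
    const schema = asSchema(z.object({ name: z.string() }));
    console.log(schema.jsonSchema);

    // parsePartialJson returns { value, state }, as destructured in the streamObject hunk.
    const { value, state } = parsePartialJson('{"name": "Ada"');
    console.log(state, value);

    console.log(isDeepEqualData({ a: 1 }, { a: 1 }));   // deep structural equality -> true
    console.log(formatDataStreamPart('text', 'hello')); // e.g. '0:"hello"\n' (data stream text part)
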