ai 5.0.0-canary.4 → 5.0.0-canary.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.d.mts +859 -143
- package/dist/index.d.ts +859 -143
- package/dist/index.js +1648 -161
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1567 -118
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +205 -15
- package/dist/internal/index.d.ts +205 -15
- package/dist/internal/index.js +63 -9
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +46 -2
- package/dist/internal/index.mjs.map +1 -1
- package/mcp-stdio/dist/index.d.mts +6 -6
- package/mcp-stdio/dist/index.d.ts +6 -6
- package/package.json +7 -5
package/dist/index.mjs
CHANGED
@@ -6,17 +6,1480 @@ var __export = (target, all) => {
 
 // core/index.ts
 import { createIdGenerator as createIdGenerator5, generateId as generateId2 } from "@ai-sdk/provider-utils";
-
-
-
-
-
-
-
-
+
+// core/util/index.ts
+import { generateId } from "@ai-sdk/provider-utils";
+
+// core/util/process-chat-response.ts
+import { generateId as generateIdFunction } from "@ai-sdk/provider-utils";
+
+// core/types/duplicated/usage.ts
+function calculateLanguageModelUsage({
+  promptTokens,
+  completionTokens
+}) {
+  return {
+    promptTokens,
+    completionTokens,
+    totalTokens: promptTokens + completionTokens
+  };
+}
+
+// core/util/parse-partial-json.ts
+import { safeParseJSON } from "@ai-sdk/provider-utils";
+
+// core/util/fix-json.ts
+function fixJson(input) {
+  const stack = ["ROOT"];
+  let lastValidIndex = -1;
+  let literalStart = null;
+  function processValueStart(char, i, swapState) {
+    {
+      switch (char) {
+        case '"': {
+          lastValidIndex = i;
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_STRING");
+          break;
+        }
+        case "f":
+        case "t":
+        case "n": {
+          lastValidIndex = i;
+          literalStart = i;
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_LITERAL");
+          break;
+        }
+        case "-": {
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_NUMBER");
+          break;
+        }
+        case "0":
+        case "1":
+        case "2":
+        case "3":
+        case "4":
+        case "5":
+        case "6":
+        case "7":
+        case "8":
+        case "9": {
+          lastValidIndex = i;
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_NUMBER");
+          break;
+        }
+        case "{": {
+          lastValidIndex = i;
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_OBJECT_START");
+          break;
+        }
+        case "[": {
+          lastValidIndex = i;
+          stack.pop();
+          stack.push(swapState);
+          stack.push("INSIDE_ARRAY_START");
+          break;
+        }
+      }
+    }
+  }
+  function processAfterObjectValue(char, i) {
+    switch (char) {
+      case ",": {
+        stack.pop();
+        stack.push("INSIDE_OBJECT_AFTER_COMMA");
+        break;
+      }
+      case "}": {
+        lastValidIndex = i;
+        stack.pop();
+        break;
+      }
+    }
+  }
+  function processAfterArrayValue(char, i) {
+    switch (char) {
+      case ",": {
+        stack.pop();
+        stack.push("INSIDE_ARRAY_AFTER_COMMA");
+        break;
+      }
+      case "]": {
+        lastValidIndex = i;
+        stack.pop();
+        break;
+      }
+    }
+  }
+  for (let i = 0; i < input.length; i++) {
+    const char = input[i];
+    const currentState = stack[stack.length - 1];
+    switch (currentState) {
+      case "ROOT":
+        processValueStart(char, i, "FINISH");
+        break;
+      case "INSIDE_OBJECT_START": {
+        switch (char) {
+          case '"': {
+            stack.pop();
+            stack.push("INSIDE_OBJECT_KEY");
+            break;
+          }
+          case "}": {
+            lastValidIndex = i;
+            stack.pop();
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_OBJECT_AFTER_COMMA": {
+        switch (char) {
+          case '"': {
+            stack.pop();
+            stack.push("INSIDE_OBJECT_KEY");
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_OBJECT_KEY": {
+        switch (char) {
+          case '"': {
+            stack.pop();
+            stack.push("INSIDE_OBJECT_AFTER_KEY");
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_OBJECT_AFTER_KEY": {
+        switch (char) {
+          case ":": {
+            stack.pop();
+            stack.push("INSIDE_OBJECT_BEFORE_VALUE");
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_OBJECT_BEFORE_VALUE": {
+        processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
+        break;
+      }
+      case "INSIDE_OBJECT_AFTER_VALUE": {
+        processAfterObjectValue(char, i);
+        break;
+      }
+      case "INSIDE_STRING": {
+        switch (char) {
+          case '"': {
+            stack.pop();
+            lastValidIndex = i;
+            break;
+          }
+          case "\\": {
+            stack.push("INSIDE_STRING_ESCAPE");
+            break;
+          }
+          default: {
+            lastValidIndex = i;
+          }
+        }
+        break;
+      }
+      case "INSIDE_ARRAY_START": {
+        switch (char) {
+          case "]": {
+            lastValidIndex = i;
+            stack.pop();
+            break;
+          }
+          default: {
+            lastValidIndex = i;
+            processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_ARRAY_AFTER_VALUE": {
+        switch (char) {
+          case ",": {
+            stack.pop();
+            stack.push("INSIDE_ARRAY_AFTER_COMMA");
+            break;
+          }
+          case "]": {
+            lastValidIndex = i;
+            stack.pop();
+            break;
+          }
+          default: {
+            lastValidIndex = i;
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_ARRAY_AFTER_COMMA": {
+        processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
+        break;
+      }
+      case "INSIDE_STRING_ESCAPE": {
+        stack.pop();
+        lastValidIndex = i;
+        break;
+      }
+      case "INSIDE_NUMBER": {
+        switch (char) {
+          case "0":
+          case "1":
+          case "2":
+          case "3":
+          case "4":
+          case "5":
+          case "6":
+          case "7":
+          case "8":
+          case "9": {
+            lastValidIndex = i;
+            break;
+          }
+          case "e":
+          case "E":
+          case "-":
+          case ".": {
+            break;
+          }
+          case ",": {
+            stack.pop();
+            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+              processAfterArrayValue(char, i);
+            }
+            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+              processAfterObjectValue(char, i);
+            }
+            break;
+          }
+          case "}": {
+            stack.pop();
+            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+              processAfterObjectValue(char, i);
+            }
+            break;
+          }
+          case "]": {
+            stack.pop();
+            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+              processAfterArrayValue(char, i);
+            }
+            break;
+          }
+          default: {
+            stack.pop();
+            break;
+          }
+        }
+        break;
+      }
+      case "INSIDE_LITERAL": {
+        const partialLiteral = input.substring(literalStart, i + 1);
+        if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
+          stack.pop();
+          if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
+            processAfterObjectValue(char, i);
+          } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
+            processAfterArrayValue(char, i);
+          }
+        } else {
+          lastValidIndex = i;
+        }
+        break;
+      }
+    }
+  }
+  let result = input.slice(0, lastValidIndex + 1);
+  for (let i = stack.length - 1; i >= 0; i--) {
+    const state = stack[i];
+    switch (state) {
+      case "INSIDE_STRING": {
+        result += '"';
+        break;
+      }
+      case "INSIDE_OBJECT_KEY":
+      case "INSIDE_OBJECT_AFTER_KEY":
+      case "INSIDE_OBJECT_AFTER_COMMA":
+      case "INSIDE_OBJECT_START":
+      case "INSIDE_OBJECT_BEFORE_VALUE":
+      case "INSIDE_OBJECT_AFTER_VALUE": {
+        result += "}";
+        break;
+      }
+      case "INSIDE_ARRAY_START":
+      case "INSIDE_ARRAY_AFTER_COMMA":
+      case "INSIDE_ARRAY_AFTER_VALUE": {
+        result += "]";
+        break;
+      }
+      case "INSIDE_LITERAL": {
+        const partialLiteral = input.substring(literalStart, input.length);
+        if ("true".startsWith(partialLiteral)) {
+          result += "true".slice(partialLiteral.length);
+        } else if ("false".startsWith(partialLiteral)) {
+          result += "false".slice(partialLiteral.length);
+        } else if ("null".startsWith(partialLiteral)) {
+          result += "null".slice(partialLiteral.length);
+        }
+      }
+    }
+  }
+  return result;
+}
+
+// core/util/parse-partial-json.ts
+function parsePartialJson(jsonText) {
+  if (jsonText === void 0) {
+    return { value: void 0, state: "undefined-input" };
+  }
+  let result = safeParseJSON({ text: jsonText });
+  if (result.success) {
+    return { value: result.value, state: "successful-parse" };
+  }
+  result = safeParseJSON({ text: fixJson(jsonText) });
+  if (result.success) {
+    return { value: result.value, state: "repaired-parse" };
+  }
+  return { value: void 0, state: "failed-parse" };
+}
+
+// core/util/data-stream-parts.ts
+var textStreamPart = {
+  code: "0",
+  name: "text",
+  parse: (value) => {
+    if (typeof value !== "string") {
+      throw new Error('"text" parts expect a string value.');
+    }
+    return { type: "text", value };
+  }
+};
+var dataStreamPart = {
+  code: "2",
+  name: "data",
+  parse: (value) => {
+    if (!Array.isArray(value)) {
+      throw new Error('"data" parts expect an array value.');
+    }
+    return { type: "data", value };
+  }
+};
+var errorStreamPart = {
+  code: "3",
+  name: "error",
+  parse: (value) => {
+    if (typeof value !== "string") {
+      throw new Error('"error" parts expect a string value.');
+    }
+    return { type: "error", value };
+  }
+};
+var messageAnnotationsStreamPart = {
+  code: "8",
+  name: "message_annotations",
+  parse: (value) => {
+    if (!Array.isArray(value)) {
+      throw new Error('"message_annotations" parts expect an array value.');
+    }
+    return { type: "message_annotations", value };
+  }
+};
+var toolCallStreamPart = {
+  code: "9",
+  name: "tool_call",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
+      throw new Error(
+        '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
+      );
+    }
+    return {
+      type: "tool_call",
+      value
+    };
+  }
+};
+var toolResultStreamPart = {
+  code: "a",
+  name: "tool_result",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("result" in value)) {
+      throw new Error(
+        '"tool_result" parts expect an object with a "toolCallId" and a "result" property.'
+      );
+    }
+    return {
+      type: "tool_result",
+      value
+    };
+  }
+};
+var toolCallStreamingStartStreamPart = {
+  code: "b",
+  name: "tool_call_streaming_start",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string") {
+      throw new Error(
+        '"tool_call_streaming_start" parts expect an object with a "toolCallId" and "toolName" property.'
+      );
+    }
+    return {
+      type: "tool_call_streaming_start",
+      value
+    };
+  }
+};
+var toolCallDeltaStreamPart = {
+  code: "c",
+  name: "tool_call_delta",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("argsTextDelta" in value) || typeof value.argsTextDelta !== "string") {
+      throw new Error(
+        '"tool_call_delta" parts expect an object with a "toolCallId" and "argsTextDelta" property.'
+      );
+    }
+    return {
+      type: "tool_call_delta",
+      value
+    };
+  }
+};
+var finishMessageStreamPart = {
+  code: "d",
+  name: "finish_message",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
+      throw new Error(
+        '"finish_message" parts expect an object with a "finishReason" property.'
+      );
+    }
+    const result = {
+      finishReason: value.finishReason
+    };
+    if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
+      result.usage = {
+        promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
+        completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
+      };
+    }
+    return {
+      type: "finish_message",
+      value: result
+    };
+  }
+};
+var finishStepStreamPart = {
+  code: "e",
+  name: "finish_step",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
+      throw new Error(
+        '"finish_step" parts expect an object with a "finishReason" property.'
+      );
+    }
+    const result = {
+      finishReason: value.finishReason,
+      isContinued: false
+    };
+    if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
+      result.usage = {
+        promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
+        completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
+      };
+    }
+    if ("isContinued" in value && typeof value.isContinued === "boolean") {
+      result.isContinued = value.isContinued;
+    }
+    return {
+      type: "finish_step",
+      value: result
+    };
+  }
+};
+var startStepStreamPart = {
+  code: "f",
+  name: "start_step",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("messageId" in value) || typeof value.messageId !== "string") {
+      throw new Error(
+        '"start_step" parts expect an object with an "id" property.'
+      );
+    }
+    return {
+      type: "start_step",
+      value: {
+        messageId: value.messageId
+      }
+    };
+  }
+};
+var reasoningStreamPart = {
+  code: "g",
+  name: "reasoning",
+  parse: (value) => {
+    if (typeof value !== "string") {
+      throw new Error('"reasoning" parts expect a string value.');
+    }
+    return { type: "reasoning", value };
+  }
+};
+var sourcePart = {
+  code: "h",
+  name: "source",
+  parse: (value) => {
+    if (value == null || typeof value !== "object") {
+      throw new Error('"source" parts expect a Source object.');
+    }
+    return {
+      type: "source",
+      value
+    };
+  }
+};
+var redactedReasoningStreamPart = {
+  code: "i",
+  name: "redacted_reasoning",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string") {
+      throw new Error(
+        '"redacted_reasoning" parts expect an object with a "data" property.'
+      );
+    }
+    return { type: "redacted_reasoning", value: { data: value.data } };
+  }
+};
+var reasoningSignatureStreamPart = {
+  code: "j",
+  name: "reasoning_signature",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("signature" in value) || typeof value.signature !== "string") {
+      throw new Error(
+        '"reasoning_signature" parts expect an object with a "signature" property.'
+      );
+    }
+    return {
+      type: "reasoning_signature",
+      value: { signature: value.signature }
+    };
+  }
+};
+var fileStreamPart = {
+  code: "k",
+  name: "file",
+  parse: (value) => {
+    if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
+      throw new Error(
+        '"file" parts expect an object with a "data" and "mimeType" property.'
+      );
+    }
+    return { type: "file", value };
+  }
+};
+var dataStreamParts = [
+  textStreamPart,
+  dataStreamPart,
+  errorStreamPart,
+  messageAnnotationsStreamPart,
+  toolCallStreamPart,
+  toolResultStreamPart,
+  toolCallStreamingStartStreamPart,
+  toolCallDeltaStreamPart,
+  finishMessageStreamPart,
+  finishStepStreamPart,
+  startStepStreamPart,
+  reasoningStreamPart,
+  sourcePart,
+  redactedReasoningStreamPart,
+  reasoningSignatureStreamPart,
+  fileStreamPart
+];
+var dataStreamPartsByCode = Object.fromEntries(
+  dataStreamParts.map((part) => [part.code, part])
+);
+var DataStreamStringPrefixes = Object.fromEntries(
+  dataStreamParts.map((part) => [part.name, part.code])
+);
+var validCodes = dataStreamParts.map((part) => part.code);
+var parseDataStreamPart = (line) => {
+  const firstSeparatorIndex = line.indexOf(":");
+  if (firstSeparatorIndex === -1) {
+    throw new Error("Failed to parse stream string. No separator found.");
+  }
+  const prefix = line.slice(0, firstSeparatorIndex);
+  if (!validCodes.includes(prefix)) {
+    throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
+  }
+  const code = prefix;
+  const textValue = line.slice(firstSeparatorIndex + 1);
+  const jsonValue = JSON.parse(textValue);
+  return dataStreamPartsByCode[code].parse(jsonValue);
+};
+function formatDataStreamPart(type, value) {
+  const streamPart = dataStreamParts.find((part) => part.name === type);
+  if (!streamPart) {
+    throw new Error(`Invalid stream part type: ${type}`);
+  }
+  return `${streamPart.code}:${JSON.stringify(value)}
+`;
+}
+
+// core/util/process-data-stream.ts
+var NEWLINE = "\n".charCodeAt(0);
+function concatChunks(chunks, totalLength) {
+  const concatenatedChunks = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const chunk of chunks) {
+    concatenatedChunks.set(chunk, offset);
+    offset += chunk.length;
+  }
+  chunks.length = 0;
+  return concatenatedChunks;
+}
+async function processDataStream({
+  stream,
+  onTextPart,
+  onReasoningPart,
+  onReasoningSignaturePart,
+  onRedactedReasoningPart,
+  onSourcePart,
+  onFilePart,
+  onDataPart,
+  onErrorPart,
+  onToolCallStreamingStartPart,
+  onToolCallDeltaPart,
+  onToolCallPart,
+  onToolResultPart,
+  onMessageAnnotationsPart,
+  onFinishMessagePart,
+  onFinishStepPart,
+  onStartStepPart
+}) {
+  const reader = stream.getReader();
+  const decoder = new TextDecoder();
+  const chunks = [];
+  let totalLength = 0;
+  while (true) {
+    const { value } = await reader.read();
+    if (value) {
+      chunks.push(value);
+      totalLength += value.length;
+      if (value[value.length - 1] !== NEWLINE) {
+        continue;
+      }
+    }
+    if (chunks.length === 0) {
+      break;
+    }
+    const concatenatedChunks = concatChunks(chunks, totalLength);
+    totalLength = 0;
+    const streamParts = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseDataStreamPart);
+    for (const { type, value: value2 } of streamParts) {
+      switch (type) {
+        case "text":
+          await (onTextPart == null ? void 0 : onTextPart(value2));
+          break;
+        case "reasoning":
+          await (onReasoningPart == null ? void 0 : onReasoningPart(value2));
+          break;
+        case "reasoning_signature":
+          await (onReasoningSignaturePart == null ? void 0 : onReasoningSignaturePart(value2));
+          break;
+        case "redacted_reasoning":
+          await (onRedactedReasoningPart == null ? void 0 : onRedactedReasoningPart(value2));
+          break;
+        case "file":
+          await (onFilePart == null ? void 0 : onFilePart(value2));
+          break;
+        case "source":
+          await (onSourcePart == null ? void 0 : onSourcePart(value2));
+          break;
+        case "data":
+          await (onDataPart == null ? void 0 : onDataPart(value2));
+          break;
+        case "error":
+          await (onErrorPart == null ? void 0 : onErrorPart(value2));
+          break;
+        case "message_annotations":
+          await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value2));
+          break;
+        case "tool_call_streaming_start":
+          await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value2));
+          break;
+        case "tool_call_delta":
+          await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value2));
+          break;
+        case "tool_call":
+          await (onToolCallPart == null ? void 0 : onToolCallPart(value2));
+          break;
+        case "tool_result":
+          await (onToolResultPart == null ? void 0 : onToolResultPart(value2));
+          break;
+        case "finish_message":
+          await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value2));
+          break;
+        case "finish_step":
+          await (onFinishStepPart == null ? void 0 : onFinishStepPart(value2));
+          break;
+        case "start_step":
+          await (onStartStepPart == null ? void 0 : onStartStepPart(value2));
+          break;
+        default: {
+          const exhaustiveCheck = type;
+          throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
+        }
+      }
+    }
+  }
+}
+
+// core/util/process-chat-response.ts
+async function processChatResponse({
+  stream,
+  update,
+  onToolCall,
+  onFinish,
+  generateId: generateId3 = generateIdFunction,
+  getCurrentDate = () => /* @__PURE__ */ new Date(),
+  lastMessage
+}) {
+  var _a17, _b;
+  const replaceLastMessage = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
+  let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
+  ((_b = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.reduce((max, toolInvocation) => {
+    var _a18;
+    return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
+  }, 0)) != null ? _b : 0) : 0;
+  const message = replaceLastMessage ? structuredClone(lastMessage) : {
+    id: generateId3(),
+    createdAt: getCurrentDate(),
+    role: "assistant",
+    content: "",
+    parts: []
+  };
+  let currentTextPart = void 0;
+  let currentReasoningPart = void 0;
+  let currentReasoningTextDetail = void 0;
+  function updateToolInvocationPart(toolCallId, invocation) {
+    const part = message.parts.find(
+      (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+    );
+    if (part != null) {
+      part.toolInvocation = invocation;
+    } else {
+      message.parts.push({
+        type: "tool-invocation",
+        toolInvocation: invocation
+      });
+    }
+  }
+  const data = [];
+  let messageAnnotations = replaceLastMessage ? lastMessage == null ? void 0 : lastMessage.annotations : void 0;
+  const partialToolCalls = {};
+  let usage = {
+    completionTokens: NaN,
+    promptTokens: NaN,
+    totalTokens: NaN
+  };
+  let finishReason = "unknown";
+  function execUpdate() {
+    const copiedData = [...data];
+    if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
+      message.annotations = messageAnnotations;
+    }
+    const copiedMessage = {
+      // deep copy the message to ensure that deep changes (msg attachments) are updated
+      // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
+      ...structuredClone(message),
+      // add a revision id to ensure that the message is updated with SWR. SWR uses a
+      // hashing approach by default to detect changes, but it only works for shallow
+      // changes. This is why we need to add a revision id to ensure that the message
+      // is updated with SWR (without it, the changes get stuck in SWR and are not
+      // forwarded to rendering):
+      revisionId: generateId3()
+    };
+    update({
+      message: copiedMessage,
+      data: copiedData,
+      replaceLastMessage
+    });
+  }
+  await processDataStream({
+    stream,
+    onTextPart(value) {
+      if (currentTextPart == null) {
+        currentTextPart = {
+          type: "text",
+          text: value
+        };
+        message.parts.push(currentTextPart);
+      } else {
+        currentTextPart.text += value;
+      }
+      message.content += value;
+      execUpdate();
+    },
+    onReasoningPart(value) {
+      var _a18;
+      if (currentReasoningTextDetail == null) {
+        currentReasoningTextDetail = { type: "text", text: value };
+        if (currentReasoningPart != null) {
+          currentReasoningPart.details.push(currentReasoningTextDetail);
+        }
+      } else {
+        currentReasoningTextDetail.text += value;
+      }
+      if (currentReasoningPart == null) {
+        currentReasoningPart = {
+          type: "reasoning",
+          reasoning: value,
+          details: [currentReasoningTextDetail]
+        };
+        message.parts.push(currentReasoningPart);
+      } else {
+        currentReasoningPart.reasoning += value;
+      }
+      message.reasoning = ((_a18 = message.reasoning) != null ? _a18 : "") + value;
+      execUpdate();
+    },
+    onReasoningSignaturePart(value) {
+      if (currentReasoningTextDetail != null) {
+        currentReasoningTextDetail.signature = value.signature;
+      }
+    },
+    onRedactedReasoningPart(value) {
+      if (currentReasoningPart == null) {
+        currentReasoningPart = {
+          type: "reasoning",
+          reasoning: "",
+          details: []
+        };
+        message.parts.push(currentReasoningPart);
+      }
+      currentReasoningPart.details.push({
+        type: "redacted",
+        data: value.data
+      });
+      currentReasoningTextDetail = void 0;
+      execUpdate();
+    },
+    onFilePart(value) {
+      message.parts.push({
+        type: "file",
+        mediaType: value.mimeType,
+        data: value.data
+      });
+      execUpdate();
+    },
+    onSourcePart(value) {
+      message.parts.push({
+        type: "source",
+        source: value
+      });
+      execUpdate();
+    },
+    onToolCallStreamingStartPart(value) {
+      if (message.toolInvocations == null) {
+        message.toolInvocations = [];
+      }
+      partialToolCalls[value.toolCallId] = {
+        text: "",
+        step,
+        toolName: value.toolName,
+        index: message.toolInvocations.length
+      };
+      const invocation = {
+        state: "partial-call",
+        step,
+        toolCallId: value.toolCallId,
+        toolName: value.toolName,
+        args: void 0
+      };
+      message.toolInvocations.push(invocation);
+      updateToolInvocationPart(value.toolCallId, invocation);
+      execUpdate();
+    },
+    onToolCallDeltaPart(value) {
+      const partialToolCall = partialToolCalls[value.toolCallId];
+      partialToolCall.text += value.argsTextDelta;
+      const { value: partialArgs } = parsePartialJson(partialToolCall.text);
+      const invocation = {
+        state: "partial-call",
+        step: partialToolCall.step,
+        toolCallId: value.toolCallId,
+        toolName: partialToolCall.toolName,
+        args: partialArgs
+      };
+      message.toolInvocations[partialToolCall.index] = invocation;
+      updateToolInvocationPart(value.toolCallId, invocation);
+      execUpdate();
+    },
+    async onToolCallPart(value) {
+      const invocation = {
+        state: "call",
+        step,
+        ...value
+      };
+      if (partialToolCalls[value.toolCallId] != null) {
+        message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
+      } else {
+        if (message.toolInvocations == null) {
+          message.toolInvocations = [];
+        }
+        message.toolInvocations.push(invocation);
+      }
+      updateToolInvocationPart(value.toolCallId, invocation);
+      execUpdate();
+      if (onToolCall) {
+        const result = await onToolCall({ toolCall: value });
+        if (result != null) {
+          const invocation2 = {
+            state: "result",
+            step,
+            ...value,
+            result
+          };
+          message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
+          updateToolInvocationPart(value.toolCallId, invocation2);
+          execUpdate();
+        }
+      }
+    },
+    onToolResultPart(value) {
+      const toolInvocations = message.toolInvocations;
+      if (toolInvocations == null) {
+        throw new Error("tool_result must be preceded by a tool_call");
+      }
+      const toolInvocationIndex = toolInvocations.findIndex(
+        (invocation2) => invocation2.toolCallId === value.toolCallId
+      );
+      if (toolInvocationIndex === -1) {
+        throw new Error(
+          "tool_result must be preceded by a tool_call with the same toolCallId"
+        );
+      }
+      const invocation = {
+        ...toolInvocations[toolInvocationIndex],
+        state: "result",
+        ...value
+      };
+      toolInvocations[toolInvocationIndex] = invocation;
+      updateToolInvocationPart(value.toolCallId, invocation);
+      execUpdate();
+    },
+    onDataPart(value) {
+      data.push(...value);
+      execUpdate();
+    },
+    onMessageAnnotationsPart(value) {
+      if (messageAnnotations == null) {
+        messageAnnotations = [...value];
+      } else {
+        messageAnnotations.push(...value);
+      }
+      execUpdate();
+    },
+    onFinishStepPart(value) {
+      step += 1;
+      currentTextPart = value.isContinued ? currentTextPart : void 0;
+      currentReasoningPart = void 0;
+      currentReasoningTextDetail = void 0;
+    },
+    onStartStepPart(value) {
+      if (!replaceLastMessage) {
+        message.id = value.messageId;
+      }
+      message.parts.push({ type: "step-start" });
+      execUpdate();
+    },
+    onFinishMessagePart(value) {
+      finishReason = value.finishReason;
+      if (value.usage != null) {
+        usage = calculateLanguageModelUsage(value.usage);
+      }
+    },
+    onErrorPart(error) {
+      throw new Error(error);
+    }
+  });
+  onFinish == null ? void 0 : onFinish({ message, finishReason, usage });
+}
+
+// core/util/process-chat-text-response.ts
+import { generateId as generateIdFunction2 } from "@ai-sdk/provider-utils";
+
+// core/util/process-text-stream.ts
+async function processTextStream({
+  stream,
+  onTextPart
+}) {
+  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) {
+      break;
+    }
+    await onTextPart(value);
+  }
+}
+
+// core/util/process-chat-text-response.ts
+async function processChatTextResponse({
+  stream,
+  update,
+  onFinish,
+  getCurrentDate = () => /* @__PURE__ */ new Date(),
+  generateId: generateId3 = generateIdFunction2
+}) {
+  const textPart = { type: "text", text: "" };
+  const resultMessage = {
+    id: generateId3(),
+    createdAt: getCurrentDate(),
+    role: "assistant",
+    content: "",
+    parts: [textPart]
+  };
+  await processTextStream({
+    stream,
+    onTextPart: (chunk) => {
+      resultMessage.content += chunk;
+      textPart.text += chunk;
+      update({
+        message: { ...resultMessage },
+        data: [],
+        replaceLastMessage: false
+      });
+    }
+  });
+  onFinish == null ? void 0 : onFinish(resultMessage, {
+    usage: { completionTokens: NaN, promptTokens: NaN, totalTokens: NaN },
+    finishReason: "unknown"
+  });
+}
+
+// core/util/call-chat-api.ts
+var getOriginalFetch = () => fetch;
+async function callChatApi({
+  api,
+  body,
+  streamProtocol = "data",
+  credentials,
+  headers,
+  abortController,
+  restoreMessagesOnFailure,
+  onResponse,
+  onUpdate,
+  onFinish,
+  onToolCall,
+  generateId: generateId3,
+  fetch: fetch2 = getOriginalFetch(),
+  lastMessage
+}) {
+  var _a17, _b;
+  const response = await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }).catch((err) => {
+    restoreMessagesOnFailure();
+    throw err;
+  });
+  if (onResponse) {
+    try {
+      await onResponse(response);
+    } catch (err) {
+      throw err;
+    }
+  }
+  if (!response.ok) {
+    restoreMessagesOnFailure();
+    throw new Error(
+      (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  switch (streamProtocol) {
+    case "text": {
+      await processChatTextResponse({
+        stream: response.body,
+        update: onUpdate,
+        onFinish,
+        generateId: generateId3
+      });
+      return;
+    }
+    case "data": {
+      await processChatResponse({
+        stream: response.body,
+        update: onUpdate,
+        lastMessage,
+        onToolCall,
+        onFinish({ message, finishReason, usage }) {
+          if (onFinish && message != null) {
+            onFinish(message, { usage, finishReason });
+          }
+        },
+        generateId: generateId3
+      });
+      return;
+    }
+    default: {
+      const exhaustiveCheck = streamProtocol;
+      throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+    }
+  }
+}
+
+// core/util/call-completion-api.ts
+var getOriginalFetch2 = () => fetch;
+async function callCompletionApi({
+  api,
+  prompt,
+  credentials,
+  headers,
+  body,
+  streamProtocol = "data",
+  setCompletion,
+  setLoading,
+  setError,
+  setAbortController,
+  onResponse,
+  onFinish,
+  onError,
+  onData,
+  fetch: fetch2 = getOriginalFetch2()
+}) {
+  var _a17;
+  try {
+    setLoading(true);
+    setError(void 0);
+    const abortController = new AbortController();
+    setAbortController(abortController);
+    setCompletion("");
+    const response = await fetch2(api, {
+      method: "POST",
+      body: JSON.stringify({
+        prompt,
+        ...body
+      }),
+      credentials,
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      signal: abortController.signal
+    }).catch((err) => {
+      throw err;
+    });
+    if (onResponse) {
+      try {
+        await onResponse(response);
+      } catch (err) {
+        throw err;
+      }
+    }
+    if (!response.ok) {
+      throw new Error(
+        (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    let result = "";
+    switch (streamProtocol) {
+      case "text": {
+        await processTextStream({
+          stream: response.body,
+          onTextPart: (chunk) => {
+            result += chunk;
+            setCompletion(result);
+          }
+        });
+        break;
+      }
+      case "data": {
+        await processDataStream({
+          stream: response.body,
+          onTextPart(value) {
+            result += value;
+            setCompletion(result);
+          },
+          onDataPart(value) {
+            onData == null ? void 0 : onData(value);
+          },
+          onErrorPart(value) {
+            throw new Error(value);
+          }
+        });
+        break;
+      }
+      default: {
+        const exhaustiveCheck = streamProtocol;
+        throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+      }
+    }
+    if (onFinish) {
+      onFinish(prompt, result);
+    }
+    setAbortController(null);
+    return result;
+  } catch (err) {
+    if (err.name === "AbortError") {
+      setAbortController(null);
+      return null;
+    }
+    if (err instanceof Error) {
+      if (onError) {
+        onError(err);
+      }
+    }
+    setError(err);
+  } finally {
+    setLoading(false);
+  }
+}
+
+// core/util/data-url.ts
+function getTextFromDataUrl(dataUrl) {
+  const [header, base64Content] = dataUrl.split(",");
+  const mediaType = header.split(";")[0].split(":")[1];
+  if (mediaType == null || base64Content == null) {
+    throw new Error("Invalid data URL format");
+  }
+  try {
+    return window.atob(base64Content);
+  } catch (error) {
+    throw new Error(`Error decoding data URL`);
+  }
+}
+
+// core/util/extract-max-tool-invocation-step.ts
+function extractMaxToolInvocationStep(toolInvocations) {
+  return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
+    var _a17;
+    return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
+  }, 0);
+}
+
+// core/util/get-message-parts.ts
+function getMessageParts(message) {
+  var _a17;
+  return (_a17 = message.parts) != null ? _a17 : [
+    ...message.toolInvocations ? message.toolInvocations.map((toolInvocation) => ({
+      type: "tool-invocation",
+      toolInvocation
+    })) : [],
+    ...message.reasoning ? [
+      {
+        type: "reasoning",
+        reasoning: message.reasoning,
+        details: [{ type: "text", text: message.reasoning }]
+      }
+    ] : [],
+    ...message.content ? [{ type: "text", text: message.content }] : []
+  ];
+}
+
+// core/util/fill-message-parts.ts
+function fillMessageParts(messages) {
+  return messages.map((message) => ({
+    ...message,
+    parts: getMessageParts(message)
+  }));
+}
+
+// core/util/is-deep-equal-data.ts
+function isDeepEqualData(obj1, obj2) {
+  if (obj1 === obj2)
+    return true;
+  if (obj1 == null || obj2 == null)
+    return false;
+  if (typeof obj1 !== "object" && typeof obj2 !== "object")
+    return obj1 === obj2;
+  if (obj1.constructor !== obj2.constructor)
+    return false;
+  if (obj1 instanceof Date && obj2 instanceof Date) {
+    return obj1.getTime() === obj2.getTime();
+  }
+  if (Array.isArray(obj1)) {
+    if (obj1.length !== obj2.length)
+      return false;
+    for (let i = 0; i < obj1.length; i++) {
+      if (!isDeepEqualData(obj1[i], obj2[i]))
+        return false;
+    }
+    return true;
+  }
+  const keys1 = Object.keys(obj1);
+  const keys2 = Object.keys(obj2);
+  if (keys1.length !== keys2.length)
+    return false;
+  for (const key of keys1) {
+    if (!keys2.includes(key))
+      return false;
+    if (!isDeepEqualData(obj1[key], obj2[key]))
+      return false;
+  }
+  return true;
+}
+
+// core/util/prepare-attachments-for-request.ts
+async function prepareAttachmentsForRequest(attachmentsFromOptions) {
+  if (!attachmentsFromOptions) {
+    return [];
+  }
+  if (attachmentsFromOptions instanceof FileList) {
+    return Promise.all(
+      Array.from(attachmentsFromOptions).map(async (attachment) => {
+        const { name: name17, type } = attachment;
+        const dataUrl = await new Promise((resolve, reject) => {
+          const reader = new FileReader();
+          reader.onload = (readerEvent) => {
+            var _a17;
+            resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+          };
+          reader.onerror = (error) => reject(error);
+          reader.readAsDataURL(attachment);
+        });
+        return {
+          name: name17,
+          contentType: type,
+          url: dataUrl
+        };
+      })
+    );
+  }
+  if (Array.isArray(attachmentsFromOptions)) {
+    return attachmentsFromOptions;
+  }
+  throw new Error("Invalid attachments type");
+}
+
+// core/util/schema.ts
+import { validatorSymbol } from "@ai-sdk/provider-utils";
+
+// core/util/zod-schema.ts
+import zodToJsonSchema from "zod-to-json-schema";
+function zodSchema(zodSchema2, options) {
+  var _a17;
+  const useReferences = (_a17 = options == null ? void 0 : options.useReferences) != null ? _a17 : false;
+  return jsonSchema(
+    zodToJsonSchema(zodSchema2, {
+      $refStrategy: useReferences ? "root" : "none",
+      target: "jsonSchema7"
+      // note: openai mode breaks various gemini conversions
+    }),
+    {
+      validate: (value) => {
+        const result = zodSchema2.safeParse(value);
+        return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
+      }
+    }
+  );
+}
+
+// core/util/schema.ts
+var schemaSymbol = Symbol.for("vercel.ai.schema");
+function jsonSchema(jsonSchema2, {
+  validate
+} = {}) {
+  return {
+    [schemaSymbol]: true,
+    _type: void 0,
+    // should never be used directly
+    [validatorSymbol]: true,
+    jsonSchema: jsonSchema2,
+    validate
+  };
+}
+function isSchema(value) {
+  return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
+}
+function asSchema(schema) {
+  return isSchema(schema) ? schema : zodSchema(schema);
+}
+
+// core/util/should-resubmit-messages.ts
+function shouldResubmitMessages({
+  originalMaxToolInvocationStep,
+  originalMessageCount,
+  maxSteps,
+  messages
+}) {
+  var _a17;
+  const lastMessage = messages[messages.length - 1];
+  return (
+    // check if the feature is enabled:
+    maxSteps > 1 && // ensure there is a last message:
+    lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
+    (messages.length > originalMessageCount || extractMaxToolInvocationStep(lastMessage.toolInvocations) !== originalMaxToolInvocationStep) && // check that next step is possible:
+    isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
+    ((_a17 = extractMaxToolInvocationStep(lastMessage.toolInvocations)) != null ? _a17 : 0) < maxSteps
+  );
+}
+function isAssistantMessageWithCompletedToolCalls(message) {
+  if (message.role !== "assistant") {
+    return false;
+  }
+  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
+    return part.type === "step-start" ? index : lastIndex;
+  }, -1);
+  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
+  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
+}
+
+// core/util/update-tool-call-result.ts
+function updateToolCallResult({
+  messages,
+  toolCallId,
+  toolResult: result
+}) {
+  var _a17;
+  const lastMessage = messages[messages.length - 1];
+  const invocationPart = lastMessage.parts.find(
+    (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+  );
+  if (invocationPart == null) {
+    return;
+  }
+  const toolResult = {
+    ...invocationPart.toolInvocation,
+    state: "result",
+    result
+  };
+  invocationPart.toolInvocation = toolResult;
+  lastMessage.toolInvocations = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.map(
+    (toolInvocation) => toolInvocation.toolCallId === toolCallId ? toolResult : toolInvocation
+  );
+}
 
 // core/data-stream/create-data-stream.ts
-import { formatDataStreamPart } from "@ai-sdk/ui-utils";
 function createDataStream({
   execute,
   onError = () => "An error occurred."
@@ -1059,7 +2522,7 @@ import {
   JSONParseError,
   TypeValidationError as TypeValidationError2
 } from "@ai-sdk/provider";
-import { createIdGenerator, safeParseJSON } from "@ai-sdk/provider-utils";
+import { createIdGenerator, safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
 
 // errors/no-object-generated-error.ts
 import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
@@ -2193,7 +3656,7 @@ function standardizePrompt({
 }
 
 // core/types/usage.ts
-function calculateLanguageModelUsage({
+function calculateLanguageModelUsage2({
   promptTokens,
   completionTokens
 }) {
@@ -2239,7 +3702,6 @@ import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
 import { safeValidateTypes as safeValidateTypes2 } from "@ai-sdk/provider-utils";
-import { asSchema } from "@ai-sdk/ui-utils";
 
 // core/util/async-iterable-stream.ts
 function createAsyncIterableStream(source) {
@@ -2689,7 +4151,6 @@ async function generateObject({
   let finishReason;
   let usage;
   let warnings;
-  let rawResponse;
   let response;
   let request;
   let logprobs;
@@ -2744,7 +4205,7 @@ async function generateObject({
       }),
       tracer,
       fn: async (span2) => {
-        var _a18, _b2, _c2, _d2, _e, _f;
+        var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
         const result2 = await model.doGenerate({
           responseFormat: {
             type: "json",
@@ -2762,13 +4223,15 @@ async function generateObject({
         const responseData = {
           id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
           timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
-          modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
+          modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
+          headers: (_g = result2.response) == null ? void 0 : _g.headers,
+          body: (_h = result2.response) == null ? void 0 : _h.body
         };
         if (result2.text === void 0) {
           throw new NoObjectGeneratedError({
             message: "No object generated: the model did not return a response.",
             response: responseData,
-            usage: calculateLanguageModelUsage(result2.usage),
+            usage: calculateLanguageModelUsage2(result2.usage),
             finishReason: result2.finishReason
           });
         }
@@ -2800,7 +4263,6 @@ async function generateObject({
     finishReason = generateResult.finishReason;
     usage = generateResult.usage;
     warnings = generateResult.warnings;
-    rawResponse = generateResult.rawResponse;
     logprobs = generateResult.logprobs;
     resultProviderMetadata = generateResult.providerMetadata;
     request = (_b = generateResult.request) != null ? _b : {};
@@ -2850,7 +4312,7 @@ async function generateObject({
       }),
       tracer,
       fn: async (span2) => {
-        var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
+        var _a18, _b2, _c2, _d2, _e, _f, _g, _h, _i, _j;
         const result2 = await model.doGenerate({
           tools: [
             {
@@ -2872,13 +4334,15 @@ async function generateObject({
         const responseData = {
           id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),
           timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
-          modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId
+          modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId,
+          headers: (_i = result2.response) == null ? void 0 : _i.headers,
+          body: (_j = result2.response) == null ? void 0 : _j.body
         };
         if (objectText === void 0) {
           throw new NoObjectGeneratedError({
             message: "No object generated: the tool was not called.",
             response: responseData,
-            usage: calculateLanguageModelUsage(result2.usage),
+            usage: calculateLanguageModelUsage2(result2.usage),
             finishReason: result2.finishReason
           });
         }
@@ -2910,7 +4374,6 @@ async function generateObject({
     finishReason = generateResult.finishReason;
     usage = generateResult.usage;
     warnings = generateResult.warnings;
-    rawResponse = generateResult.rawResponse;
     logprobs = generateResult.logprobs;
     resultProviderMetadata = generateResult.providerMetadata;
     request = (_d = generateResult.request) != null ? _d : {};
@@ -2928,14 +4391,14 @@ async function generateObject({
     }
   }
   function processResult(result2) {
-    const parseResult = safeParseJSON({ text: result2 });
+    const parseResult = safeParseJSON2({ text: result2 });
     if (!parseResult.success) {
       throw new NoObjectGeneratedError({
         message: "No object generated: could not parse the response.",
         cause: parseResult.error,
         text: result2,
         response,
-        usage: calculateLanguageModelUsage(usage),
+        usage: calculateLanguageModelUsage2(usage),
         finishReason
       });
     }
@@ -2944,7 +4407,7 @@ async function generateObject({
|
|
2944
4407
|
{
|
2945
4408
|
text: result2,
|
2946
4409
|
response,
|
2947
|
-
usage:
|
4410
|
+
usage: calculateLanguageModelUsage2(usage)
|
2948
4411
|
}
|
2949
4412
|
);
|
2950
4413
|
if (!validationResult.success) {
|
@@ -2953,7 +4416,7 @@ async function generateObject({
|
|
2953
4416
|
cause: validationResult.error,
|
2954
4417
|
text: result2,
|
2955
4418
|
response,
|
2956
|
-
usage:
|
4419
|
+
usage: calculateLanguageModelUsage2(usage),
|
2957
4420
|
finishReason
|
2958
4421
|
});
|
2959
4422
|
}
|
@@ -2992,14 +4455,10 @@ async function generateObject({
|
|
2992
4455
|
return new DefaultGenerateObjectResult({
|
2993
4456
|
object: object2,
|
2994
4457
|
finishReason,
|
2995
|
-
usage:
|
4458
|
+
usage: calculateLanguageModelUsage2(usage),
|
2996
4459
|
warnings,
|
2997
4460
|
request,
|
2998
|
-
response
|
2999
|
-
...response,
|
3000
|
-
headers: rawResponse == null ? void 0 : rawResponse.headers,
|
3001
|
-
body: rawResponse == null ? void 0 : rawResponse.body
|
3002
|
-
},
|
4461
|
+
response,
|
3003
4462
|
logprobs,
|
3004
4463
|
providerMetadata: resultProviderMetadata
|
3005
4464
|
});
|
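
The hunks above move the provider's response headers and body onto the response metadata that generateObject builds internally and compute usage via calculateLanguageModelUsage2, so the result no longer merges a separate rawResponse. A minimal sketch of what this looks like from the caller's side, assuming an already configured provider model (passed in as `model`, typed loosely here) and a hypothetical Zod schema:

  import { generateObject } from "ai";
  import { z } from "zod";

  // `model` is assumed: any configured provider language model instance.
  async function getCity(model: any) {
    const result = await generateObject({
      model,
      schema: z.object({ city: z.string() }),
      prompt: "Name a city.",
    });
    // Response metadata (id, timestamp, modelId) now also exposes the
    // provider response headers and body directly.
    console.log(result.response.modelId);
    console.log(result.response.headers);
    console.log(result.response.body);
  }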
@@ -3031,10 +4490,6 @@ var DefaultGenerateObjectResult = class {

  // core/generate-object/stream-object.ts
  import { createIdGenerator as createIdGenerator2 } from "@ai-sdk/provider-utils";
- import {
- isDeepEqualData,
- parsePartialJson
- } from "@ai-sdk/ui-utils";

  // util/delayed-promise.ts
  var DelayedPromise = class {

@@ -3418,7 +4873,7 @@ var DefaultStreamObjectResult = class {
  }
  }
  const {
- result: { stream, warnings,
+ result: { stream, warnings, response, request },
  doStreamSpan,
  startTimestampMs
  } = await retry(

@@ -3467,7 +4922,7 @@ var DefaultStreamObjectResult = class {
  let error;
  let accumulatedText = "";
  let textDelta = "";
- let
+ let fullResponse = {
  id: generateId3(),
  timestamp: currentDate(),
  modelId: model.modelId

@@ -3524,10 +4979,10 @@ var DefaultStreamObjectResult = class {
  }
  switch (chunk.type) {
  case "response-metadata": {
-
- id: (_a18 = chunk.id) != null ? _a18 :
- timestamp: (_b2 = chunk.timestamp) != null ? _b2 :
- modelId: (_c = chunk.modelId) != null ? _c :
+ fullResponse = {
+ id: (_a18 = chunk.id) != null ? _a18 : fullResponse.id,
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : fullResponse.timestamp,
+ modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
  };
  break;
  }

@@ -3536,20 +4991,24 @@ var DefaultStreamObjectResult = class {
  controller.enqueue({ type: "text-delta", textDelta });
  }
  finishReason = chunk.finishReason;
- usage =
+ usage = calculateLanguageModelUsage2(chunk.usage);
  providerMetadata = chunk.providerMetadata;
- controller.enqueue({
+ controller.enqueue({
+ ...chunk,
+ usage,
+ response: fullResponse
+ });
  self.usagePromise.resolve(usage);
  self.providerMetadataPromise.resolve(providerMetadata);
  self.responsePromise.resolve({
- ...
- headers:
+ ...fullResponse,
+ headers: response == null ? void 0 : response.headers
  });
  const validationResult = outputStrategy.validateFinalResult(
  latestObjectJson,
  {
  text: accumulatedText,
- response,
+ response: fullResponse,
  usage
  }
  );

@@ -3561,7 +5020,7 @@ var DefaultStreamObjectResult = class {
  message: "No object generated: response did not match schema.",
  cause: validationResult.error,
  text: accumulatedText,
- response,
+ response: fullResponse,
  usage,
  finishReason
  });

@@ -3591,15 +5050,15 @@ var DefaultStreamObjectResult = class {
  "ai.response.object": {
  output: () => JSON.stringify(object2)
  },
- "ai.response.id":
- "ai.response.model":
- "ai.response.timestamp":
+ "ai.response.id": fullResponse.id,
+ "ai.response.model": fullResponse.modelId,
+ "ai.response.timestamp": fullResponse.timestamp.toISOString(),
  "ai.usage.promptTokens": finalUsage.promptTokens,
  "ai.usage.completionTokens": finalUsage.completionTokens,
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [finishReason],
- "gen_ai.response.id":
- "gen_ai.response.model":
+ "gen_ai.response.id": fullResponse.id,
+ "gen_ai.response.model": fullResponse.modelId,
  "gen_ai.usage.input_tokens": finalUsage.promptTokens,
  "gen_ai.usage.output_tokens": finalUsage.completionTokens
  }

@@ -3623,8 +5082,8 @@ var DefaultStreamObjectResult = class {
  object: object2,
  error,
  response: {
- ...
- headers:
+ ...fullResponse,
+ headers: response == null ? void 0 : response.headers
  },
  warnings,
  providerMetadata,
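
In streamObject, the hunks above accumulate the response metadata in a local fullResponse object and hand it (together with the HTTP headers of the underlying stream response) to the finish handling and the resolved response promise. A small sketch of how that surfaces in onFinish, assuming a configured provider model passed in as `model`:

  import { streamObject } from "ai";
  import { z } from "zod";

  function streamCity(model: any) {
    const { partialObjectStream } = streamObject({
      model,
      schema: z.object({ city: z.string() }),
      prompt: "Name a city.",
      onFinish({ object, response, usage }) {
        // `response` carries the accumulated id/timestamp/modelId plus the
        // HTTP headers of the underlying streaming response.
        console.log(object, response.id, response.modelId, response.headers, usage);
      },
    });
    return partialObjectStream;
  }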
@@ -3796,9 +5255,6 @@ var ToolExecutionError = class extends AISDKError10 {
  };
  _a10 = symbol10;

- // core/prompt/prepare-tools-and-tool-choice.ts
- import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";
-
  // core/util/is-non-empty-object.ts
  function isNonEmptyObject(object2) {
  return object2 != null && Object.keys(object2).length > 0;

@@ -3829,7 +5285,7 @@ function prepareToolsAndToolChoice({
  type: "function",
  name: name17,
  description: tool2.description,
- parameters:
+ parameters: asSchema(tool2.parameters).jsonSchema
  };
  case "provider-defined":
  return {

@@ -3862,8 +5318,7 @@ function removeTextAfterLastWhitespace(text2) {
  }

  // core/generate-text/parse-tool-call.ts
- import { safeParseJSON as
- import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
+ import { safeParseJSON as safeParseJSON3, safeValidateTypes as safeValidateTypes3 } from "@ai-sdk/provider-utils";

  // errors/invalid-tool-arguments-error.ts
  import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";

@@ -3958,7 +5413,7 @@ async function parseToolCall({
  repairedToolCall = await repairToolCall({
  toolCall,
  tools,
- parameterSchema: ({ toolName }) =>
+ parameterSchema: ({ toolName }) => asSchema(tools[toolName].parameters).jsonSchema,
  system,
  messages,
  error

@@ -3987,8 +5442,8 @@ async function doParseToolCall({
  availableTools: Object.keys(tools)
  });
  }
- const schema =
- const parseResult = toolCall.args.trim() === "" ? safeValidateTypes3({ value: {}, schema }) :
+ const schema = asSchema(tool2.parameters);
+ const parseResult = toolCall.args.trim() === "" ? safeValidateTypes3({ value: {}, schema }) : safeParseJSON3({ text: toolCall.args, schema });
  if (parseResult.success === false) {
  throw new InvalidToolArgumentsError({
  toolName,
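
Tool parameter handling above now goes through the asSchema helper bundled with (and re-exported by) this package instead of the aliased @ai-sdk/ui-utils copies; the JSON Schema sent to the provider comes from its .jsonSchema property. A short sketch with a hypothetical Zod parameter schema:

  import { asSchema } from "ai";
  import { z } from "zod";

  const parameters = z.object({
    city: z.string(),
    unit: z.enum(["C", "F"]),
  });

  // asSchema wraps a Zod (or JSON) schema; .jsonSchema is the plain JSON Schema
  // object that prepareToolsAndToolChoice/parseToolCall pass to the provider.
  console.log(JSON.stringify(asSchema(parameters).jsonSchema, null, 2));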
@@ -4143,7 +5598,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a18, _b, _c, _d, _e, _f, _g
+ var _a18, _b, _c, _d, _e, _f, _g;
  const toolsAndToolChoice = {
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
  };

@@ -4218,7 +5673,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span2) => {
- var _a19, _b2, _c2, _d2, _e2, _f2;
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
  const result = await model.doGenerate({
  ...callSettings,
  ...toolsAndToolChoice,

@@ -4232,7 +5687,9 @@ async function generateText({
  const responseData = {
  id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
- modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
+ modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId,
+ headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
+ body: (_h = result.response) == null ? void 0 : _h.body
  };
  span2.setAttributes(
  selectTelemetryAttributes({
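
generateText gets the same treatment: the doGenerate response metadata now carries headers and body directly, which is what lets the later hunks drop the rawResponse merge for step and final responses. A sketch of reading that metadata, assuming a configured provider model passed in as `model`:

  import { generateText } from "ai";

  async function run(model: any) {
    const result = await generateText({ model, prompt: "Hello!" });
    // Step and result responses are built from the provider response object,
    // which now includes the headers (and body) shown in the hunk above.
    for (const step of result.steps) {
      console.log(step.response.modelId, step.response.headers);
    }
    console.log(result.response.headers);
  }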
@@ -4282,7 +5739,7 @@ async function generateText({
  messages: stepInputMessages,
  abortSignal
  });
- const currentUsage =
+ const currentUsage = calculateLanguageModelUsage2(
  currentModelResponse.usage
  );
  usage = addLanguageModelUsage(usage, currentUsage);

@@ -4349,8 +5806,6 @@ async function generateText({
  request: (_f = currentModelResponse.request) != null ? _f : {},
  response: {
  ...currentModelResponse.response,
- headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
- body: (_h = currentModelResponse.rawResponse) == null ? void 0 : _h.body,
  // deep clone msgs to avoid mutating past messages in multi-step:
  messages: structuredClone(responseMessages)
  },

@@ -4402,11 +5857,9 @@ async function generateText({
  finishReason: currentModelResponse.finishReason,
  usage,
  warnings: currentModelResponse.warnings,
- request: (
+ request: (_g = currentModelResponse.request) != null ? _g : {},
  response: {
  ...currentModelResponse.response,
- headers: (_j = currentModelResponse.rawResponse) == null ? void 0 : _j.headers,
- body: (_k = currentModelResponse.rawResponse) == null ? void 0 : _k.body,
  messages: responseMessages
  },
  logprobs: currentModelResponse.logprobs,

@@ -4535,11 +5988,7 @@ __export(output_exports, {
  object: () => object,
  text: () => text
  });
- import { safeParseJSON as
- import {
- asSchema as asSchema4,
- parsePartialJson as parsePartialJson2
- } from "@ai-sdk/ui-utils";
+ import { safeParseJSON as safeParseJSON4, safeValidateTypes as safeValidateTypes4 } from "@ai-sdk/provider-utils";

  // errors/index.ts
  import {

@@ -4615,7 +6064,7 @@ var text = () => ({
  var object = ({
  schema: inputSchema
  }) => {
- const schema =
+ const schema = asSchema(inputSchema);
  return {
  type: "object",
  responseFormat: ({ model }) => ({

@@ -4629,7 +6078,7 @@ var object = ({
  });
  },
  parsePartial({ text: text2 }) {
- const result =
+ const result = parsePartialJson(text2);
  switch (result.state) {
  case "failed-parse":
  case "undefined-input":

@@ -4647,7 +6096,7 @@ var object = ({
  }
  },
  parseOutput({ text: text2 }, context) {
- const parseResult =
+ const parseResult = safeParseJSON4({ text: text2 });
  if (!parseResult.success) {
  throw new NoObjectGeneratedError({
  message: "No object generated: could not parse the response.",

@@ -4749,7 +6198,6 @@ function smoothStream({
  // core/generate-text/stream-text.ts
  import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
  import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
- import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";

  // util/as-array.ts
  function asArray(value) {

@@ -4864,7 +6312,6 @@ function mergeStreams(stream1, stream2) {
  }

  // core/generate-text/run-tools-transformation.ts
- import { generateId } from "@ai-sdk/ui-utils";
  function runToolsTransformation({
  tools,
  generatorStream,

@@ -5023,7 +6470,7 @@ function runToolsTransformation({
  type: "finish",
  finishReason: chunk.finishReason,
  logprobs: chunk.logprobs,
- usage:
+ usage: calculateLanguageModelUsage2(chunk.usage),
  experimental_providerMetadata: chunk.providerMetadata
  };
  break;

@@ -5534,7 +6981,7 @@ var DefaultStreamTextResult = class {
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
  };
  const {
- result: { stream: stream2, warnings,
+ result: { stream: stream2, warnings, response, request },
  doStreamSpan,
  startTimestampMs
  } = await retry(

@@ -5845,7 +7292,7 @@ var DefaultStreamTextResult = class {
  request: stepRequest,
  response: {
  ...stepResponse,
- headers:
+ headers: response == null ? void 0 : response.headers
  },
  warnings,
  isContinued: nextStepType === "continue",

@@ -5862,7 +7309,7 @@ var DefaultStreamTextResult = class {
  logprobs: stepLogProbs,
  response: {
  ...stepResponse,
- headers:
+ headers: response == null ? void 0 : response.headers
  }
  });
  self.closeStream();

@@ -6058,13 +7505,13 @@ var DefaultStreamTextResult = class {
  const chunkType = chunk.type;
  switch (chunkType) {
  case "text-delta": {
- controller.enqueue(
+ controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
  break;
  }
  case "reasoning": {
  if (sendReasoning) {
  controller.enqueue(
-
+ formatDataStreamPart("reasoning", chunk.textDelta)
  );
  }
  break;

@@ -6072,7 +7519,7 @@ var DefaultStreamTextResult = class {
  case "redacted-reasoning": {
  if (sendReasoning) {
  controller.enqueue(
-
+ formatDataStreamPart("redacted_reasoning", {
  data: chunk.data
  })
  );

@@ -6082,7 +7529,7 @@ var DefaultStreamTextResult = class {
  case "reasoning-signature": {
  if (sendReasoning) {
  controller.enqueue(
-
+ formatDataStreamPart("reasoning_signature", {
  signature: chunk.signature
  })
  );

@@ -6091,7 +7538,7 @@ var DefaultStreamTextResult = class {
  }
  case "file": {
  controller.enqueue(
-
+ formatDataStreamPart("file", {
  mimeType: chunk.mediaType,
  data: chunk.base64
  })

@@ -6101,14 +7548,14 @@ var DefaultStreamTextResult = class {
  case "source": {
  if (sendSources) {
  controller.enqueue(
-
+ formatDataStreamPart("source", chunk.source)
  );
  }
  break;
  }
  case "tool-call-streaming-start": {
  controller.enqueue(
-
+ formatDataStreamPart("tool_call_streaming_start", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName
  })

@@ -6117,7 +7564,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-delta": {
  controller.enqueue(
-
+ formatDataStreamPart("tool_call_delta", {
  toolCallId: chunk.toolCallId,
  argsTextDelta: chunk.argsTextDelta
  })

@@ -6126,7 +7573,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call": {
  controller.enqueue(
-
+ formatDataStreamPart("tool_call", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName,
  args: chunk.args

@@ -6136,7 +7583,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-result": {
  controller.enqueue(
-
+ formatDataStreamPart("tool_result", {
  toolCallId: chunk.toolCallId,
  result: chunk.result
  })

@@ -6145,13 +7592,13 @@ var DefaultStreamTextResult = class {
  }
  case "error": {
  controller.enqueue(
-
+ formatDataStreamPart("error", getErrorMessage5(chunk.error))
  );
  break;
  }
  case "step-start": {
  controller.enqueue(
-
+ formatDataStreamPart("start_step", {
  messageId: chunk.messageId
  })
  );

@@ -6159,7 +7606,7 @@ var DefaultStreamTextResult = class {
  }
  case "step-finish": {
  controller.enqueue(
-
+ formatDataStreamPart("finish_step", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,

@@ -6173,7 +7620,7 @@ var DefaultStreamTextResult = class {
  case "finish": {
  if (experimental_sendFinish) {
  controller.enqueue(
-
+ formatDataStreamPart("finish_message", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,
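
The data-stream conversion above now calls the formatDataStreamPart that is bundled with (and exported by) this package rather than an aliased import from @ai-sdk/ui-utils. Each call serializes one part of the data stream protocol as a type code followed by JSON and a newline; a tiny sketch, assuming the current wire format:

  import { formatDataStreamPart } from "ai";

  // "text" parts serialize roughly as 0:"Hello" followed by a newline;
  // "error" parts take the error message string, as in the hunk above.
  console.log(formatDataStreamPart("text", "Hello"));
  console.log(formatDataStreamPart("error", "something went wrong"));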
@@ -6602,7 +8049,7 @@ function simulateStreamingMiddleware() {
  return {
  stream: simulatedStream,
  rawCall: result.rawCall,
- rawResponse: result.
+ rawResponse: result.response,
  warnings: result.warnings
  };
  }

@@ -6674,9 +8121,6 @@ function appendClientMessage({
  }

  // core/prompt/append-response-messages.ts
- import {
- extractMaxToolInvocationStep
- } from "@ai-sdk/ui-utils";
  import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
  function appendResponseMessages({
  messages,

@@ -7000,9 +8444,6 @@ var DefaultProviderRegistry = class {
  }
  };

- // core/tool/mcp/mcp-client.ts
- import { jsonSchema } from "@ai-sdk/ui-utils";
-
  // core/tool/tool.ts
  function tool(tool2) {
  return tool2;

@@ -7616,7 +9057,6 @@ __export(langchain_adapter_exports, {
  toDataStream: () => toDataStream,
  toDataStreamResponse: () => toDataStreamResponse
  });
- import { formatDataStreamPart as formatDataStreamPart4 } from "@ai-sdk/ui-utils";

  // streams/stream-callbacks.ts
  function createCallbacksTransformer(callbacks = {}) {

@@ -7672,7 +9112,7 @@ function toDataStreamInternal(stream, callbacks) {
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
  new TransformStream({
  transform: async (chunk, controller) => {
- controller.enqueue(
+ controller.enqueue(formatDataStreamPart("text", chunk));
  }
  })
  );

@@ -7724,7 +9164,6 @@ __export(llamaindex_adapter_exports, {
  toDataStreamResponse: () => toDataStreamResponse2
  });
  import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";
- import { formatDataStreamPart as formatDataStreamPart5 } from "@ai-sdk/ui-utils";
  function toDataStreamInternal2(stream, callbacks) {
  const trimStart = trimStartOfStream();
  return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(

@@ -7736,7 +9175,7 @@ function toDataStreamInternal2(stream, callbacks) {
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
  new TransformStream({
  transform: async (chunk, controller) => {
- controller.enqueue(
+ controller.enqueue(formatDataStreamPart("text", chunk));
  }
  })
  );

@@ -7777,9 +9216,6 @@ function trimStartOfStream() {
  };
  }

- // streams/stream-data.ts
- import { formatDataStreamPart as formatDataStreamPart6 } from "@ai-sdk/ui-utils";
-
  // util/constants.ts
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;

@@ -7830,7 +9266,7 @@ var StreamData = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode(
+ this.encoder.encode(formatDataStreamPart("data", [value]))
  );
  }
  appendMessageAnnotation(value) {

@@ -7841,7 +9277,7 @@ var StreamData = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode(
+ this.encoder.encode(formatDataStreamPart("message_annotations", [value]))
  );
  }
  };

@@ -7879,6 +9315,9 @@ export {
  UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
  appendClientMessage,
  appendResponseMessages,
+ asSchema,
+ callChatApi,
+ callCompletionApi,
  convertToCoreMessages,
  coreAssistantMessageSchema,
  coreMessageSchema,

@@ -7900,22 +9339,32 @@ export {
  generateImage as experimental_generateImage,
  transcribe as experimental_transcribe,
  experimental_wrapLanguageModel,
+ extractMaxToolInvocationStep,
  extractReasoningMiddleware,
-
+ fillMessageParts,
+ formatDataStreamPart,
  generateId2 as generateId,
  generateObject,
  generateText,
-
+ getMessageParts,
+ getTextFromDataUrl,
+ isAssistantMessageWithCompletedToolCalls,
+ isDeepEqualData,
+ jsonSchema,
  parseDataStreamPart,
+ parsePartialJson,
  pipeDataStreamToResponse,
+ prepareAttachmentsForRequest,
  processDataStream,
  processTextStream,
+ shouldResubmitMessages,
  simulateReadableStream,
  simulateStreamingMiddleware,
  smoothStream,
  streamObject,
  streamText,
  tool,
+ updateToolCallResult,
  wrapLanguageModel,
  zodSchema
  };
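
The expanded export block above means the chat/UI helpers that previously had to be imported from @ai-sdk/ui-utils (asSchema, formatDataStreamPart, isDeepEqualData, jsonSchema, parsePartialJson, and friends) are now available from the main package. A small sketch using a few of them; the schema shape is illustrative only:

  import {
    asSchema,
    formatDataStreamPart,
    isDeepEqualData,
    jsonSchema,
    parsePartialJson,
  } from "ai";

  const citySchema = jsonSchema<{ city: string }>({
    type: "object",
    properties: { city: { type: "string" } },
    required: ["city"],
  });

  console.log(asSchema(citySchema).jsonSchema);
  console.log(parsePartialJson('{"city": "Ber')); // { value, state } for partial JSON
  console.log(isDeepEqualData({ a: 1 }, { a: 1 })); // true
  console.log(formatDataStreamPart("text", "Hello"));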