ai 3.0.27 → 3.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +84 -9
- package/dist/index.d.ts +84 -9
- package/dist/index.js +65 -14
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +65 -14
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
- package/react/dist/index.js +1 -0
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +1 -0
- package/react/dist/index.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -375,6 +375,8 @@ async function experimental_generateObject({
   let finishReason;
   let usage;
   let warnings;
+  let rawResponse;
+  let logprobs;
   switch (mode) {
     case "json": {
       const validatedPrompt = getValidatedPrompt({
@@ -398,6 +400,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "grammar": {
@@ -422,6 +426,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "tool": {
@@ -455,6 +461,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case void 0: {
@@ -473,7 +481,9 @@ async function experimental_generateObject({
     object: parseResult.value,
     finishReason,
     usage: calculateTokenUsage(usage),
-    warnings
+    warnings,
+    rawResponse,
+    logprobs
   });
 }
 var GenerateObjectResult = class {
@@ -482,6 +492,8 @@ var GenerateObjectResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
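
For reference, a minimal sketch of how the new GenerateObjectResult fields surface to callers of experimental_generateObject. Only the field names (rawResponse, logprobs) come from the hunks above; the model wiring, the zod schema, and the prompt are illustrative assumptions.

import { experimental_generateObject } from "ai";
import { z } from "zod";

// `model` is any provider language model instance; its construction is omitted here.
type ObjectOptions = Parameters<typeof experimental_generateObject>[0];

export async function getCity(model: ObjectOptions["model"]) {
  const result = await experimental_generateObject({
    model,
    schema: z.object({ city: z.string() }),
    prompt: "Name a city in France.",
  });

  // Newly exposed in this version range (may be undefined when the provider
  // does not return them):
  console.log(result.rawResponse); // raw provider response metadata
  console.log(result.logprobs);    // provider-specific log probabilities

  return result.object;
}
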
@@ -913,6 +925,7 @@ async function experimental_streamObject({
           case "text-delta":
             controller.enqueue(chunk.textDelta);
             break;
+          case "finish":
           case "error":
             controller.enqueue(chunk);
             break;
@@ -940,6 +953,7 @@ async function experimental_streamObject({
           case "text-delta":
             controller.enqueue(chunk.textDelta);
             break;
+          case "finish":
           case "error":
             controller.enqueue(chunk);
             break;
@@ -975,6 +989,7 @@ async function experimental_streamObject({
           case "tool-call-delta":
             controller.enqueue(chunk.argsTextDelta);
             break;
+          case "finish":
           case "error":
             controller.enqueue(chunk);
             break;
@@ -994,16 +1009,19 @@ async function experimental_streamObject({
   const result = await retry(() => model.doStream(callOptions));
   return new StreamObjectResult({
     stream: result.stream.pipeThrough(new TransformStream(transformer)),
-    warnings: result.warnings
+    warnings: result.warnings,
+    rawResponse: result.rawResponse
   });
 }
 var StreamObjectResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   get partialObjectStream() {
     let accumulatedText = "";
@@ -1019,13 +1037,42 @@ var StreamObjectResult = class {
             latestObject = currentObject;
             controller.enqueue(currentObject);
           }
-        }
-        if (typeof chunk === "object" && chunk.type === "error") {
+        } else if (chunk.type === "error") {
           throw chunk.error;
         }
       }
     });
   }
+  get fullStream() {
+    let accumulatedText = "";
+    let latestObject = void 0;
+    return createAsyncIterableStream(this.originalStream, {
+      transform(chunk, controller) {
+        if (typeof chunk === "string") {
+          accumulatedText += chunk;
+          const currentObject = parsePartialJson(
+            accumulatedText
+          );
+          if (!isDeepEqualData(latestObject, currentObject)) {
+            latestObject = currentObject;
+            controller.enqueue({ type: "object", object: currentObject });
+          }
+        } else {
+          switch (chunk.type) {
+            case "finish":
+              controller.enqueue({
+                ...chunk,
+                usage: calculateTokenUsage(chunk.usage)
+              });
+              break;
+            default:
+              controller.enqueue(chunk);
+              break;
+          }
+        }
+      }
+    });
+  }
 };
 
 // core/generate-text/tool-call.ts
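
A sketch of consuming the new fullStream getter and the rawResponse field added to StreamObjectResult above. The "object" and "finish" chunk shapes follow the transform in this hunk; the model wiring and the zod schema are illustrative assumptions.

import { experimental_streamObject } from "ai";
import { z } from "zod";

type StreamObjectOptions = Parameters<typeof experimental_streamObject>[0];

export async function streamRecipe(model: StreamObjectOptions["model"]) {
  const result = await experimental_streamObject({
    model,
    schema: z.object({ title: z.string(), steps: z.array(z.string()) }),
    prompt: "Write a short pancake recipe.",
  });

  console.log(result.rawResponse); // newly exposed raw provider response metadata

  for await (const chunk of result.fullStream) {
    if (chunk.type === "object") {
      console.log("partial object:", chunk.object); // incremental parse of the JSON text
    } else if (chunk.type === "finish") {
      console.log("token usage:", chunk.usage); // normalized via calculateTokenUsage
    }
  }
}
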
@@ -1113,7 +1160,9 @@ async function experimental_generateText({
     toolResults,
     finishReason: modelResponse.finishReason,
     usage: calculateTokenUsage(modelResponse.usage),
-    warnings: modelResponse.warnings
+    warnings: modelResponse.warnings,
+    rawResponse: modelResponse.rawResponse,
+    logprobs: modelResponse.logprobs
   });
 }
 async function executeTools({
@@ -1147,6 +1196,8 @@ var GenerateTextResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
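
Analogous sketch for experimental_generateText, whose result now carries rawResponse and logprobs alongside warnings. Everything except those field names (the model setup, the prompt, and the headers access) is an assumption.

import { experimental_generateText } from "ai";

type TextOptions = Parameters<typeof experimental_generateText>[0];

export async function ask(model: TextOptions["model"]) {
  const { text, usage, warnings, rawResponse, logprobs } =
    await experimental_generateText({
      model,
      prompt: "Say hello in one short sentence.",
    });

  console.log(text, usage, warnings);
  console.log(rawResponse?.headers); // assumed: providers typically put response headers here
  console.log(logprobs);             // may be undefined if the provider has no logprob support
  return text;
}
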
@@ -1247,11 +1298,8 @@ function runToolsTransformation({
         controller.enqueue({
           type: "finish",
           finishReason: chunk.finishReason,
-          usage: {
-            promptTokens: chunk.usage.promptTokens,
-            completionTokens: chunk.usage.completionTokens,
-            totalTokens: chunk.usage.promptTokens + chunk.usage.completionTokens
-          }
+          logprobs: chunk.logprobs,
+          usage: calculateTokenUsage(chunk.usage)
         });
         break;
       }
@@ -1309,7 +1357,7 @@ async function experimental_streamText({
 }) {
   const retry = retryWithExponentialBackoff({ maxRetries });
   const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
-  const { stream, warnings } = await retry(
+  const { stream, warnings, rawResponse } = await retry(
     () => model.doStream({
       mode: {
         type: "regular",
@@ -1331,16 +1379,19 @@ async function experimental_streamText({
       tools,
       generatorStream: stream
     }),
-    warnings
+    warnings,
+    rawResponse
   });
 }
 var StreamTextResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   /**
   A text stream that returns only the generated text deltas. You can use it
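
And the streaming counterpart: StreamTextResult now exposes rawResponse next to warnings. A minimal usage sketch; the model setup is assumed, while textStream is the existing delta stream referenced in the doc comment above.

import { experimental_streamText } from "ai";

type StreamTextOptions = Parameters<typeof experimental_streamText>[0];

export async function streamAnswer(model: StreamTextOptions["model"]) {
  const result = await experimental_streamText({
    model,
    prompt: "Write a haiku about rivers.",
  });

  console.log(result.warnings);
  console.log(result.rawResponse); // newly exposed raw provider response metadata

  let full = "";
  for await (const delta of result.textStream) {
    full += delta; // accumulate the text deltas as they arrive
  }
  return full;
}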