ai 3.0.26 → 3.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +104 -37
- package/dist/index.d.ts +104 -37
- package/dist/index.js +65 -42
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +65 -42
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -202,13 +202,6 @@ function prepareCallSettings({
         message: "temperature must be a number"
       });
     }
-    if (temperature < 0 || temperature > 1) {
-      throw new InvalidArgumentError({
-        parameter: "temperature",
-        value: temperature,
-        message: "temperature must be between 0 and 1 (inclusive)"
-      });
-    }
   }
   if (topP != null) {
     if (typeof topP !== "number") {
@@ -218,13 +211,6 @@ function prepareCallSettings({
         message: "topP must be a number"
       });
     }
-    if (topP < 0 || topP > 1) {
-      throw new InvalidArgumentError({
-        parameter: "topP",
-        value: topP,
-        message: "topP must be between 0 and 1 (inclusive)"
-      });
-    }
   }
   if (presencePenalty != null) {
     if (typeof presencePenalty !== "number") {
@@ -234,13 +220,6 @@ function prepareCallSettings({
         message: "presencePenalty must be a number"
       });
     }
-    if (presencePenalty < -1 || presencePenalty > 1) {
-      throw new InvalidArgumentError({
-        parameter: "presencePenalty",
-        value: presencePenalty,
-        message: "presencePenalty must be between -1 and 1 (inclusive)"
-      });
-    }
   }
   if (frequencyPenalty != null) {
     if (typeof frequencyPenalty !== "number") {
@@ -250,13 +229,6 @@ function prepareCallSettings({
         message: "frequencyPenalty must be a number"
      });
     }
-    if (frequencyPenalty < -1 || frequencyPenalty > 1) {
-      throw new InvalidArgumentError({
-        parameter: "frequencyPenalty",
-        value: frequencyPenalty,
-        message: "frequencyPenalty must be between -1 and 1 (inclusive)"
-      });
-    }
   }
   if (seed != null) {
     if (!Number.isInteger(seed)) {
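
Net effect of the four hunks above: prepareCallSettings still requires temperature, topP, presencePenalty and frequencyPenalty to be numbers, but it no longer rejects values outside the old 0 to 1 and -1 to 1 windows with InvalidArgumentError; such values are now forwarded to the provider. A minimal sketch of the difference follows; the `model` instance and its provider setup are assumptions, not part of this diff.

import { experimental_generateText } from "ai";

// Assumption: `model` is a provider language model instance created elsewhere
// (provider packages are outside this diff).
declare const model: Parameters<typeof experimental_generateText>[0]["model"];

// ai@3.0.26 threw InvalidArgumentError ("temperature must be between 0 and 1
// (inclusive)") before any request was made; ai@3.0.28 forwards the value and
// leaves range validation to the provider.
const { text } = await experimental_generateText({
  model,
  prompt: "Write a haiku about version bumps.",
  temperature: 1.5,
});
console.log(text);
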
@@ -403,6 +375,8 @@ async function experimental_generateObject({
   let finishReason;
   let usage;
   let warnings;
+  let rawResponse;
+  let logprobs;
   switch (mode) {
     case "json": {
       const validatedPrompt = getValidatedPrompt({
@@ -426,6 +400,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "grammar": {
@@ -450,6 +426,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "tool": {
@@ -483,6 +461,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case void 0: {
@@ -501,7 +481,9 @@ async function experimental_generateObject({
     object: parseResult.value,
     finishReason,
     usage: calculateTokenUsage(usage),
-    warnings
+    warnings,
+    rawResponse,
+    logprobs
   });
 }
 var GenerateObjectResult = class {
@@ -510,6 +492,8 @@ var GenerateObjectResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
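
With the generateResult.rawResponse / generateResult.logprobs plumbing above, GenerateObjectResult now exposes both fields next to object, finishReason, usage and warnings. A minimal sketch of reading them; the `model` instance and the zod schema are assumptions, and rawResponse typically carries the provider's HTTP response headers.

import { experimental_generateObject } from "ai";
import { z } from "zod";

// Assumption: a provider model instance created elsewhere.
declare const model: Parameters<typeof experimental_generateObject>[0]["model"];

const result = await experimental_generateObject({
  model,
  schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
  prompt: "Suggest a title and tags for a post about semantic versioning.",
});

console.log(result.object);               // validated object, unchanged behavior
console.log(result.rawResponse?.headers); // new: raw provider response metadata
console.log(result.logprobs);             // new: log probabilities, if the provider reports them
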
@@ -941,6 +925,7 @@ async function experimental_streamObject({
         case "text-delta":
           controller.enqueue(chunk.textDelta);
           break;
+        case "finish":
         case "error":
           controller.enqueue(chunk);
           break;
@@ -968,6 +953,7 @@ async function experimental_streamObject({
         case "text-delta":
           controller.enqueue(chunk.textDelta);
           break;
+        case "finish":
         case "error":
           controller.enqueue(chunk);
           break;
@@ -1003,6 +989,7 @@ async function experimental_streamObject({
         case "tool-call-delta":
           controller.enqueue(chunk.argsTextDelta);
           break;
+        case "finish":
         case "error":
           controller.enqueue(chunk);
           break;
@@ -1022,16 +1009,19 @@ async function experimental_streamObject({
   const result = await retry(() => model.doStream(callOptions));
   return new StreamObjectResult({
     stream: result.stream.pipeThrough(new TransformStream(transformer)),
-    warnings: result.warnings
+    warnings: result.warnings,
+    rawResponse: result.rawResponse
   });
 }
 var StreamObjectResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   get partialObjectStream() {
     let accumulatedText = "";
@@ -1047,13 +1037,42 @@ var StreamObjectResult = class {
             latestObject = currentObject;
             controller.enqueue(currentObject);
           }
-        }
-        if (typeof chunk === "object" && chunk.type === "error") {
+        } else if (chunk.type === "error") {
           throw chunk.error;
         }
       }
     });
   }
+  get fullStream() {
+    let accumulatedText = "";
+    let latestObject = void 0;
+    return createAsyncIterableStream(this.originalStream, {
+      transform(chunk, controller) {
+        if (typeof chunk === "string") {
+          accumulatedText += chunk;
+          const currentObject = parsePartialJson(
+            accumulatedText
+          );
+          if (!isDeepEqualData(latestObject, currentObject)) {
+            latestObject = currentObject;
+            controller.enqueue({ type: "object", object: currentObject });
+          }
+        } else {
+          switch (chunk.type) {
+            case "finish":
+              controller.enqueue({
+                ...chunk,
+                usage: calculateTokenUsage(chunk.usage)
+              });
+              break;
+            default:
+              controller.enqueue(chunk);
+              break;
+          }
+        }
+      }
+    });
+  }
 };
 
 // core/generate-text/tool-call.ts
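
The new fullStream getter is the structured counterpart of partialObjectStream: partial objects arrive as { type: "object", object }, "finish" parts get their usage normalized through calculateTokenUsage, and other parts (including errors) are forwarded instead of thrown. A minimal consumption sketch, assuming the part shapes match the runtime code in the hunk above and reusing the same `model` / schema assumptions as before.

import { experimental_streamObject } from "ai";
import { z } from "zod";

// Assumption: a provider model instance created elsewhere.
declare const model: Parameters<typeof experimental_streamObject>[0]["model"];

const result = await experimental_streamObject({
  model,
  schema: z.object({ steps: z.array(z.string()) }),
  prompt: "List the steps of a release process.",
});

for await (const part of result.fullStream) {
  switch (part.type) {
    case "object": // growing partial object, same data partialObjectStream yields
      console.log(part.object);
      break;
    case "finish": // usage already normalized to prompt/completion/total tokens
      console.log(part.finishReason, part.usage);
      break;
    case "error": // forwarded here, unlike partialObjectStream, which throws it
      console.error(part.error);
      break;
  }
}
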
@@ -1141,7 +1160,9 @@ async function experimental_generateText({
     toolResults,
     finishReason: modelResponse.finishReason,
     usage: calculateTokenUsage(modelResponse.usage),
-    warnings: modelResponse.warnings
+    warnings: modelResponse.warnings,
+    rawResponse: modelResponse.rawResponse,
+    logprobs: modelResponse.logprobs
   });
 }
 async function executeTools({
@@ -1175,6 +1196,8 @@ var GenerateTextResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
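
experimental_generateText gets the same treatment: GenerateTextResult now carries rawResponse and logprobs alongside text, toolCalls, toolResults, finishReason, usage and warnings. A minimal sketch under the same `model` assumption as above.

import { experimental_generateText } from "ai";

// Assumption: a provider model instance created elsewhere.
declare const model: Parameters<typeof experimental_generateText>[0]["model"];

const result = await experimental_generateText({
  model,
  prompt: "Summarize semantic versioning in one sentence.",
});

console.log(result.text);
console.log(result.rawResponse?.headers); // new: raw provider response metadata
console.log(result.logprobs);             // new: log probabilities, if the provider reports them
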
@@ -1275,11 +1298,8 @@ function runToolsTransformation({
           controller.enqueue({
             type: "finish",
             finishReason: chunk.finishReason,
-            usage: {
-              promptTokens: chunk.usage.promptTokens,
-              completionTokens: chunk.usage.completionTokens,
-              totalTokens: chunk.usage.promptTokens + chunk.usage.completionTokens
-            }
+            logprobs: chunk.logprobs,
+            usage: calculateTokenUsage(chunk.usage)
           });
           break;
         }
@@ -1337,7 +1357,7 @@ async function experimental_streamText({
 }) {
   const retry = retryWithExponentialBackoff({ maxRetries });
   const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
-  const { stream, warnings } = await retry(
+  const { stream, warnings, rawResponse } = await retry(
     () => model.doStream({
       mode: {
         type: "regular",
@@ -1359,16 +1379,19 @@ async function experimental_streamText({
       tools,
       generatorStream: stream
     }),
-    warnings
+    warnings,
+    rawResponse
   });
 }
 var StreamTextResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   /**
   A text stream that returns only the generated text deltas. You can use it
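
The streaming results follow suit: StreamTextResult (above) and StreamObjectResult (earlier hunk) both store rawResponse from doStream, so provider response metadata is available as soon as the call returns, before the stream is consumed. A minimal sketch for experimental_streamText, same `model` assumption as before.

import process from "node:process";
import { experimental_streamText } from "ai";

// Assumption: a provider model instance created elsewhere.
declare const model: Parameters<typeof experimental_streamText>[0]["model"];

const result = await experimental_streamText({
  model,
  prompt: "Stream a short changelog entry.",
});

// New in 3.0.28: available immediately, typically the provider's response headers.
console.log(result.rawResponse?.headers);

for await (const textDelta of result.textStream) {
  process.stdout.write(textDelta);
}
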