ai 3.0.26 → 3.0.28

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -277,13 +277,6 @@ function prepareCallSettings({
         message: "temperature must be a number"
       });
     }
-    if (temperature < 0 || temperature > 1) {
-      throw new import_provider3.InvalidArgumentError({
-        parameter: "temperature",
-        value: temperature,
-        message: "temperature must be between 0 and 1 (inclusive)"
-      });
-    }
   }
   if (topP != null) {
     if (typeof topP !== "number") {
@@ -293,13 +286,6 @@ function prepareCallSettings({
         message: "topP must be a number"
       });
     }
-    if (topP < 0 || topP > 1) {
-      throw new import_provider3.InvalidArgumentError({
-        parameter: "topP",
-        value: topP,
-        message: "topP must be between 0 and 1 (inclusive)"
-      });
-    }
   }
   if (presencePenalty != null) {
     if (typeof presencePenalty !== "number") {
@@ -309,13 +295,6 @@ function prepareCallSettings({
         message: "presencePenalty must be a number"
       });
     }
-    if (presencePenalty < -1 || presencePenalty > 1) {
-      throw new import_provider3.InvalidArgumentError({
-        parameter: "presencePenalty",
-        value: presencePenalty,
-        message: "presencePenalty must be between -1 and 1 (inclusive)"
-      });
-    }
   }
   if (frequencyPenalty != null) {
     if (typeof frequencyPenalty !== "number") {
@@ -325,13 +304,6 @@ function prepareCallSettings({
         message: "frequencyPenalty must be a number"
       });
     }
-    if (frequencyPenalty < -1 || frequencyPenalty > 1) {
-      throw new import_provider3.InvalidArgumentError({
-        parameter: "frequencyPenalty",
-        value: frequencyPenalty,
-        message: "frequencyPenalty must be between -1 and 1 (inclusive)"
-      });
-    }
   }
   if (seed != null) {
     if (!Number.isInteger(seed)) {
@@ -478,6 +450,8 @@ async function experimental_generateObject({
   let finishReason;
   let usage;
   let warnings;
+  let rawResponse;
+  let logprobs;
   switch (mode) {
     case "json": {
       const validatedPrompt = getValidatedPrompt({
@@ -501,6 +475,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "grammar": {
@@ -525,6 +501,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case "tool": {
@@ -558,6 +536,8 @@ async function experimental_generateObject({
       finishReason = generateResult.finishReason;
       usage = generateResult.usage;
       warnings = generateResult.warnings;
+      rawResponse = generateResult.rawResponse;
+      logprobs = generateResult.logprobs;
       break;
     }
     case void 0: {
@@ -576,7 +556,9 @@ async function experimental_generateObject({
     object: parseResult.value,
     finishReason,
     usage: calculateTokenUsage(usage),
-    warnings
+    warnings,
+    rawResponse,
+    logprobs
   });
 }
 var GenerateObjectResult = class {
@@ -585,6 +567,8 @@ var GenerateObjectResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
@@ -1016,6 +1000,7 @@ async function experimental_streamObject({
             case "text-delta":
               controller.enqueue(chunk.textDelta);
               break;
+            case "finish":
             case "error":
               controller.enqueue(chunk);
               break;
@@ -1043,6 +1028,7 @@ async function experimental_streamObject({
             case "text-delta":
               controller.enqueue(chunk.textDelta);
               break;
+            case "finish":
             case "error":
               controller.enqueue(chunk);
               break;
@@ -1078,6 +1064,7 @@ async function experimental_streamObject({
             case "tool-call-delta":
               controller.enqueue(chunk.argsTextDelta);
               break;
+            case "finish":
             case "error":
               controller.enqueue(chunk);
               break;
@@ -1097,16 +1084,19 @@ async function experimental_streamObject({
   const result = await retry(() => model.doStream(callOptions));
   return new StreamObjectResult({
     stream: result.stream.pipeThrough(new TransformStream(transformer)),
-    warnings: result.warnings
+    warnings: result.warnings,
+    rawResponse: result.rawResponse
   });
 }
 var StreamObjectResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   get partialObjectStream() {
     let accumulatedText = "";
@@ -1122,13 +1112,42 @@ var StreamObjectResult = class {
             latestObject = currentObject;
             controller.enqueue(currentObject);
           }
-        }
-        if (typeof chunk === "object" && chunk.type === "error") {
+        } else if (chunk.type === "error") {
           throw chunk.error;
         }
       }
     });
   }
+  get fullStream() {
+    let accumulatedText = "";
+    let latestObject = void 0;
+    return createAsyncIterableStream(this.originalStream, {
+      transform(chunk, controller) {
+        if (typeof chunk === "string") {
+          accumulatedText += chunk;
+          const currentObject = parsePartialJson(
+            accumulatedText
+          );
+          if (!isDeepEqualData(latestObject, currentObject)) {
+            latestObject = currentObject;
+            controller.enqueue({ type: "object", object: currentObject });
+          }
+        } else {
+          switch (chunk.type) {
+            case "finish":
+              controller.enqueue({
+                ...chunk,
+                usage: calculateTokenUsage(chunk.usage)
+              });
+              break;
+            default:
+              controller.enqueue(chunk);
+              break;
+          }
+        }
+      }
+    });
+  }
 };
 
 // core/generate-text/tool-call.ts
@@ -1213,7 +1232,9 @@ async function experimental_generateText({
     toolResults,
     finishReason: modelResponse.finishReason,
     usage: calculateTokenUsage(modelResponse.usage),
-    warnings: modelResponse.warnings
+    warnings: modelResponse.warnings,
+    rawResponse: modelResponse.rawResponse,
+    logprobs: modelResponse.logprobs
   });
 }
 async function executeTools({
@@ -1247,6 +1268,8 @@ var GenerateTextResult = class {
     this.finishReason = options.finishReason;
     this.usage = options.usage;
     this.warnings = options.warnings;
+    this.rawResponse = options.rawResponse;
+    this.logprobs = options.logprobs;
   }
 };
 
@@ -1347,11 +1370,8 @@ function runToolsTransformation({
           controller.enqueue({
             type: "finish",
             finishReason: chunk.finishReason,
-            usage: {
-              promptTokens: chunk.usage.promptTokens,
-              completionTokens: chunk.usage.completionTokens,
-              totalTokens: chunk.usage.promptTokens + chunk.usage.completionTokens
-            }
+            logprobs: chunk.logprobs,
+            usage: calculateTokenUsage(chunk.usage)
           });
           break;
         }
@@ -1409,7 +1429,7 @@ async function experimental_streamText({
 }) {
   const retry = retryWithExponentialBackoff({ maxRetries });
   const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
-  const { stream, warnings } = await retry(
+  const { stream, warnings, rawResponse } = await retry(
     () => model.doStream({
       mode: {
         type: "regular",
@@ -1431,16 +1451,19 @@ async function experimental_streamText({
       tools,
       generatorStream: stream
     }),
-    warnings
+    warnings,
+    rawResponse
   });
 }
 var StreamTextResult = class {
   constructor({
     stream,
-    warnings
+    warnings,
+    rawResponse
   }) {
     this.originalStream = stream;
     this.warnings = warnings;
+    this.rawResponse = rawResponse;
   }
   /**
   A text stream that returns only the generated text deltas. You can use it