ai 4.2.10 → 4.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,24 @@
  # ai

+ ## 4.3.0
+
+ ### Minor Changes
+
+ - 772a2d7: feat (core): Add finishReason field to NoObjectGeneratedError
+
+ ### Patch Changes
+
+ - Updated dependencies [2c19b9a]
+   - @ai-sdk/provider-utils@2.2.4
+   - @ai-sdk/react@1.2.6
+   - @ai-sdk/ui-utils@1.2.5
+
+ ## 4.2.11
+
+ ### Patch Changes
+
+ - c45d100: fix (core): send buffered text in smooth stream when stream parts change
+
  ## 4.2.10

  ### Patch Changes
package/dist/index.d.mts CHANGED
@@ -2254,6 +2254,7 @@ interface Output<OUTPUT, PARTIAL> {
      }, context: {
          response: LanguageModelResponseMetadata;
          usage: LanguageModelUsage;
+         finishReason: FinishReason;
      }): OUTPUT;
  }
  declare const text: () => Output<string, string>;
@@ -4114,12 +4115,17 @@ declare class NoObjectGeneratedError extends AISDKError {
       The usage of the model.
       */
      readonly usage: LanguageModelUsage | undefined;
-     constructor({ message, cause, text, response, usage, }: {
+     /**
+      Reason why the model finished generating a response.
+      */
+     readonly finishReason: FinishReason | undefined;
+     constructor({ message, cause, text, response, usage, finishReason, }: {
          message?: string;
          cause?: Error;
          text?: string;
          response: LanguageModelResponseMetadata;
          usage: LanguageModelUsage;
+         finishReason: FinishReason;
      });
      static isInstance(error: unknown): error is NoObjectGeneratedError;
  }
package/dist/index.d.ts CHANGED
@@ -2254,6 +2254,7 @@ interface Output<OUTPUT, PARTIAL> {
      }, context: {
          response: LanguageModelResponseMetadata;
          usage: LanguageModelUsage;
+         finishReason: FinishReason;
      }): OUTPUT;
  }
  declare const text: () => Output<string, string>;
@@ -4114,12 +4115,17 @@ declare class NoObjectGeneratedError extends AISDKError {
       The usage of the model.
       */
      readonly usage: LanguageModelUsage | undefined;
-     constructor({ message, cause, text, response, usage, }: {
+     /**
+      Reason why the model finished generating a response.
+      */
+     readonly finishReason: FinishReason | undefined;
+     constructor({ message, cause, text, response, usage, finishReason, }: {
          message?: string;
          cause?: Error;
          text?: string;
          response: LanguageModelResponseMetadata;
          usage: LanguageModelUsage;
+         finishReason: FinishReason;
      });
      static isInstance(error: unknown): error is NoObjectGeneratedError;
  }
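
Note (not part of the published diff): the new `finishReason` field on `NoObjectGeneratedError` surfaces why the model stopped generating when no object could be produced. Below is a minimal sketch of how a caller might read it; the `model` placeholder, the zod schema, and the prompt are assumptions for illustration only.

```ts
import { generateObject, NoObjectGeneratedError } from 'ai';
import type { LanguageModel } from 'ai';
import { z } from 'zod';

// Placeholder: any provider-supplied LanguageModel instance (e.g. from
// @ai-sdk/openai). Not part of this diff.
declare const model: LanguageModel;

async function main() {
  try {
    const { object } = await generateObject({
      model,
      schema: z.object({ title: z.string() }),
      prompt: 'Suggest a title for a blog post about TypeScript.',
    });
    console.log(object.title);
  } catch (error) {
    if (NoObjectGeneratedError.isInstance(error)) {
      // finishReason is new in 4.3.0; text, response, and usage already existed.
      // It lets callers tell, e.g., a truncated response ('length') apart from
      // a schema mismatch.
      console.error('No object generated. finishReason:', error.finishReason);
      console.error('usage:', error.usage);
    } else {
      throw error;
    }
  }
}

main();
```
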
package/dist/index.js CHANGED
@@ -1115,13 +1115,15 @@ var NoObjectGeneratedError = class extends import_provider5.AISDKError {
      cause,
      text: text2,
      response,
-     usage
+     usage,
+     finishReason
    }) {
      super({ name: name4, message, cause });
      this[_a4] = true;
      this.text = text2;
      this.response = response;
      this.usage = usage;
+     this.finishReason = finishReason;
    }
    static isInstance(error) {
      return import_provider5.AISDKError.hasMarker(error, marker4);
@@ -1156,14 +1158,11 @@ var DownloadError = class extends import_provider6.AISDKError {
  _a5 = symbol5;

  // util/download.ts
- async function download({
-   url,
-   fetchImplementation = fetch
- }) {
+ async function download({ url }) {
    var _a17;
    const urlText = url.toString();
    try {
-     const response = await fetchImplementation(urlText);
+     const response = await fetch(urlText);
      if (!response.ok) {
        throw new DownloadError({
          url: urlText,
@@ -2286,7 +2285,8 @@ var noSchemaOutputStrategy = {
        message: "No object generated: response did not match schema.",
        text: context.text,
        response: context.response,
-       usage: context.usage
+       usage: context.usage,
+       finishReason: context.finishReason
      })
    } : { success: true, value };
  },
@@ -2783,7 +2783,8 @@ async function generateObject({
      throw new NoObjectGeneratedError({
        message: "No object generated: the model did not return a response.",
        response: responseData,
-       usage: calculateLanguageModelUsage(result2.usage)
+       usage: calculateLanguageModelUsage(result2.usage),
+       finishReason: result2.finishReason
      });
    }
    span2.setAttributes(
@@ -2892,7 +2893,8 @@ async function generateObject({
      throw new NoObjectGeneratedError({
        message: "No object generated: the tool was not called.",
        response: responseData,
-       usage: calculateLanguageModelUsage(result2.usage)
+       usage: calculateLanguageModelUsage(result2.usage),
+       finishReason: result2.finishReason
      });
    }
    span2.setAttributes(
@@ -2948,7 +2950,8 @@ async function generateObject({
        cause: parseResult.error,
        text: result2,
        response,
-       usage: calculateLanguageModelUsage(usage)
+       usage: calculateLanguageModelUsage(usage),
+       finishReason
      });
    }
    const validationResult = outputStrategy.validateFinalResult(
@@ -2965,7 +2968,8 @@ async function generateObject({
        cause: validationResult.error,
        text: result2,
        response,
-       usage: calculateLanguageModelUsage(usage)
+       usage: calculateLanguageModelUsage(usage),
+       finishReason
      });
    }
    return validationResult.value;
@@ -3570,7 +3574,8 @@ var DefaultStreamObjectResult = class {
        cause: validationResult.error,
        text: accumulatedText,
        response,
-       usage
+       usage,
+       finishReason
      });
      self.objectPromise.reject(error);
    }
@@ -4398,7 +4403,11 @@ async function generateText({
    }
    return output.parseOutput(
      { text: text2 },
-     { response: currentModelResponse.response, usage }
+     {
+       response: currentModelResponse.response,
+       usage,
+       finishReason: currentModelResponse.finishReason
+     }
    );
  },
  toolCalls: currentToolCalls,
@@ -4643,7 +4652,8 @@ var object = ({
        cause: parseResult.error,
        text: text2,
        response: context.response,
-       usage: context.usage
+       usage: context.usage,
+       finishReason: context.finishReason
      });
    }
    const validationResult = (0, import_provider_utils10.safeValidateTypes)({
@@ -4656,7 +4666,8 @@ var object = ({
        cause: validationResult.error,
        text: text2,
        response: context.response,
-       usage: context.usage
+       usage: context.usage,
+       finishReason: context.finishReason
      });
    }
    return validationResult.value;
@@ -4687,7 +4698,7 @@ function smoothStream({
  let buffer = "";
  return new TransformStream({
    async transform(chunk, controller) {
-     if (chunk.type === "step-finish") {
+     if (chunk.type !== "text-delta") {
        if (buffer.length > 0) {
          controller.enqueue({ type: "text-delta", textDelta: buffer });
          buffer = "";
@@ -4695,10 +4706,6 @@ function smoothStream({
        controller.enqueue(chunk);
        return;
      }
-     if (chunk.type !== "text-delta") {
-       controller.enqueue(chunk);
-       return;
-     }
      buffer += chunk.textDelta;
      let match;
      while ((match = chunkingRegexp.exec(buffer)) != null) {
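
Note (not part of the diff): the `smoothStream` change above means buffered text is now flushed as soon as any non-text-delta part arrives, rather than only at `step-finish`, so smoothed text cannot fall out of order behind tool calls or other stream parts. A minimal usage sketch follows, assuming the usual pairing of `smoothStream` with `streamText` via the `experimental_transform` option; the `model` placeholder and prompt are illustrative assumptions.

```ts
import { streamText, smoothStream } from 'ai';
import type { LanguageModel } from 'ai';

// Placeholder: any provider-supplied LanguageModel instance. Not part of this diff.
declare const model: LanguageModel;

async function run() {
  const result = streamText({
    model,
    prompt: 'Write a haiku about package diffs.',
    // smoothStream buffers text-delta parts and re-emits them in smaller chunks;
    // with the 4.2.11 fix, any buffered text is flushed whenever a non-text part
    // (tool call, step boundary, etc.) passes through the transform.
    experimental_transform: smoothStream(),
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }
}

run();
```
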