ai 6.0.0-beta.142 → 6.0.0-beta.143

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -866,7 +866,7 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "6.0.0-beta.142" : "0.0.0-test";
+ var VERSION = true ? "6.0.0-beta.143" : "0.0.0-test";

  // src/util/download/download.ts
  var download = async ({ url }) => {
@@ -3421,7 +3421,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a15, _b, _c, _d, _e, _f, _g;
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
  const initialMessages = initialPrompt.messages;
  const responseMessages = [];
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -3489,10 +3489,11 @@ async function generateText({
  supportedUrls: await stepModel.supportedUrls,
  download: download2
  });
+ experimental_context = (_d = prepareStepResult == null ? void 0 : prepareStepResult.experimental_context) != null ? _d : experimental_context;
  const { toolChoice: stepToolChoice, tools: stepTools } = await prepareToolsAndToolChoice({
  tools,
- toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
- activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
+ toolChoice: (_e = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _e : toolChoice,
+ activeTools: (_f = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _f : activeTools
  });
  currentModelResponse = await retry(
  () => {
@@ -3535,7 +3536,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span2) => {
- var _a17, _b2, _c2, _d2, _e2, _f2, _g2, _h;
+ var _a17, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
  const result = await stepModel.doGenerate({
  ...callSettings2,
  tools: stepTools,
@@ -3551,7 +3552,7 @@ async function generateText({
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
- body: (_h = result.response) == null ? void 0 : _h.body
+ body: (_h2 = result.response) == null ? void 0 : _h2.body
  };
  span2.setAttributes(
  await selectTelemetryAttributes({
@@ -3684,7 +3685,7 @@ async function generateText({
  usage: asLanguageModelUsage(currentModelResponse.usage),
  warnings: currentModelResponse.warnings,
  providerMetadata: currentModelResponse.providerMetadata,
- request: (_f = currentModelResponse.request) != null ? _f : {},
+ request: (_g = currentModelResponse.request) != null ? _g : {},
  response: {
  ...currentModelResponse.response,
  // deep clone msgs to avoid mutating past messages in multi-step:
@@ -3692,7 +3693,7 @@ async function generateText({
  }
  });
  logWarnings({
- warnings: (_g = currentModelResponse.warnings) != null ? _g : [],
+ warnings: (_h = currentModelResponse.warnings) != null ? _h : [],
  provider: stepModel.provider,
  model: stepModel.modelId
  });
@@ -5870,7 +5871,7 @@ var DefaultStreamTextResult = class {
  responseMessages,
  usage
  }) {
- var _a15, _b, _c, _d, _e;
+ var _a15, _b, _c, _d, _e, _f;
  const includeRawChunks2 = self.includeRawChunks;
  stepFinish = new DelayedPromise();
  const stepInputMessages = [...initialMessages, ...responseMessages];
@@ -5897,6 +5898,7 @@ var DefaultStreamTextResult = class {
  toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
  activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
  });
+ experimental_context = (_f = prepareStepResult == null ? void 0 : prepareStepResult.experimental_context) != null ? _f : experimental_context;
  const {
  result: { stream: stream2, response, request },
  doStreamSpan,
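
Aside from the version bump, the substantive change in this diff is that a prepareStep result can now carry experimental_context, which replaces the ambient experimental_context for the step being prepared in both generateText and the streaming path (DefaultStreamTextResult); the remaining edits are only renamed minified temporaries (_d.._h). A minimal sketch of how that might look from the calling side, assuming prepareStep and experimental_context behave as this compiled code suggests and that tools still receive experimental_context in their execute options; the model instance and the lookup tool below are placeholders, not part of this diff:

import { generateText, tool, stepCountIs } from "ai";
import { z } from "zod";

const result = await generateText({
  model: myModel, // placeholder: any LanguageModel instance
  tools: {
    lookup: tool({
      description: "Example tool that reads the per-call context",
      inputSchema: z.object({ query: z.string() }),
      execute: async ({ query }, { experimental_context }) => {
        // With beta.143, this should reflect whatever the most recent
        // prepareStep returned, rather than only the top-level option.
        return `query=${query}, context=${JSON.stringify(experimental_context)}`;
      },
    }),
  },
  stopWhen: stepCountIs(3),
  experimental_context: { stepIndex: 0 },
  prepareStep: ({ stepNumber }) => ({
    // New in this diff: this value now overrides experimental_context
    // for the prepared step (previously this field was not read here).
    experimental_context: { stepIndex: stepNumber },
  }),
  prompt: "Look something up and summarize it.",
});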