ai 6.0.0-beta.141 → 6.0.0-beta.143
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/index.d.mts +67 -13
- package/dist/index.d.ts +67 -13
- package/dist/index.js +15 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +15 -11
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -866,7 +866,7 @@ import {
 } from "@ai-sdk/provider-utils";

 // src/version.ts
-var VERSION = true ? "6.0.0-beta.
+var VERSION = true ? "6.0.0-beta.143" : "0.0.0-test";

 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -3421,7 +3421,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span) => {
-var _a15, _b, _c, _d, _e, _f, _g;
+var _a15, _b, _c, _d, _e, _f, _g, _h;
 const initialMessages = initialPrompt.messages;
 const responseMessages = [];
 const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -3475,7 +3475,8 @@ async function generateText({
 model,
 steps,
 stepNumber: steps.length,
-messages: stepInputMessages
+messages: stepInputMessages,
+experimental_context
 }));
 const stepModel = resolveLanguageModel(
 (_a15 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a15 : model
@@ -3488,10 +3489,11 @@ async function generateText({
 supportedUrls: await stepModel.supportedUrls,
 download: download2
 });
+experimental_context = (_d = prepareStepResult == null ? void 0 : prepareStepResult.experimental_context) != null ? _d : experimental_context;
 const { toolChoice: stepToolChoice, tools: stepTools } = await prepareToolsAndToolChoice({
 tools,
-toolChoice: (
-activeTools: (
+toolChoice: (_e = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _e : toolChoice,
+activeTools: (_f = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _f : activeTools
 });
 currentModelResponse = await retry(
 () => {
@@ -3534,7 +3536,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span2) => {
-var _a17, _b2, _c2, _d2, _e2, _f2, _g2,
+var _a17, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
 const result = await stepModel.doGenerate({
 ...callSettings2,
 tools: stepTools,
@@ -3550,7 +3552,7 @@ async function generateText({
 timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
 modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
 headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
-body: (
+body: (_h2 = result.response) == null ? void 0 : _h2.body
 };
 span2.setAttributes(
 await selectTelemetryAttributes({
@@ -3683,7 +3685,7 @@ async function generateText({
 usage: asLanguageModelUsage(currentModelResponse.usage),
 warnings: currentModelResponse.warnings,
 providerMetadata: currentModelResponse.providerMetadata,
-request: (
+request: (_g = currentModelResponse.request) != null ? _g : {},
 response: {
 ...currentModelResponse.response,
 // deep clone msgs to avoid mutating past messages in multi-step:
@@ -3691,7 +3693,7 @@ async function generateText({
 }
 });
 logWarnings({
-warnings: (
+warnings: (_h = currentModelResponse.warnings) != null ? _h : [],
 provider: stepModel.provider,
 model: stepModel.modelId
 });
@@ -5869,7 +5871,7 @@ var DefaultStreamTextResult = class {
 responseMessages,
 usage
 }) {
-var _a15, _b, _c, _d, _e;
+var _a15, _b, _c, _d, _e, _f;
 const includeRawChunks2 = self.includeRawChunks;
 stepFinish = new DelayedPromise();
 const stepInputMessages = [...initialMessages, ...responseMessages];
@@ -5877,7 +5879,8 @@ var DefaultStreamTextResult = class {
 model,
 steps: recordedSteps,
 stepNumber: recordedSteps.length,
-messages: stepInputMessages
+messages: stepInputMessages,
+experimental_context
 }));
 const stepModel = resolveLanguageModel(
 (_a15 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a15 : model
@@ -5895,6 +5898,7 @@ var DefaultStreamTextResult = class {
 toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
 activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
 });
+experimental_context = (_f = prepareStepResult == null ? void 0 : prepareStepResult.experimental_context) != null ? _f : experimental_context;
 const {
 result: { stream: stream2, response, request },
 doStreamSpan,
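
Functionally, the dist changes above amount to two things: the embedded `VERSION` constant is bumped to `6.0.0-beta.143`, and `experimental_context` is now threaded through the `prepareStep` callback in both the `generateText` step loop and the streaming step loop in `DefaultStreamTextResult`. Each step passes the current `experimental_context` into `prepareStep`, and if the callback's result includes an `experimental_context`, that value replaces the running context for the remaining steps. The sketch below is an illustration inferred only from the changed lines, not from the package's documentation; the `model` placeholder, the `weather` tool, the use of `zod` for its schema, and the shape of the context object are assumptions.

```ts
import { generateText, stepCountIs, tool, type LanguageModel } from "ai";
import { z } from "zod";

// Placeholder: any provider model instance, e.g. from @ai-sdk/openai.
declare const model: LanguageModel;

const result = await generateText({
  model,
  prompt: "Look up the weather in Berlin, then summarize it.",
  stopWhen: stepCountIs(5),
  // Starting context; per the diff it is passed into prepareStep on every step.
  experimental_context: { attempt: 0 },
  tools: {
    // Hypothetical tool, only here to make the multi-step loop concrete.
    weather: tool({
      description: "Get the current weather for a city",
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }, { experimental_context }) => {
        // Tool calls see whatever context is current when they run, which
        // now includes any replacement returned by prepareStep.
        console.log("context at tool call:", experimental_context);
        return { city, temperatureC: 21 };
      },
    }),
  },
  prepareStep: ({ stepNumber, experimental_context }) => {
    // Per the diff: the current experimental_context arrives as an input...
    const previous = (experimental_context ?? {}) as { attempt?: number };
    // ...and returning experimental_context swaps it in for later steps.
    return { experimental_context: { ...previous, attempt: stepNumber } };
  },
});

console.log(result.text);
```

The same behavior applies to `streamText`, since the equivalent lines were changed in `DefaultStreamTextResult`.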