ai 6.0.21 → 6.0.23
This diff shows the contents of publicly released package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two published versions.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +13 -5
- package/dist/index.d.ts +13 -5
- package/dist/index.js +290 -258
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +289 -258
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +2 -0
- package/dist/internal/index.d.ts +2 -0
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -710,6 +710,12 @@ function getTotalTimeoutMs(timeout) {
 }
 return timeout.totalMs;
 }
+function getStepTimeoutMs(timeout) {
+if (timeout == null || typeof timeout === "number") {
+return void 0;
+}
+return timeout.stepMs;
+}

 // src/prompt/convert-to-language-model-prompt.ts
 import {
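Taken together with getTotalTimeoutMs directly above it, the new helper implies that the timeout option can be either a bare number (total budget only) or an object carrying separate total and per-step budgets. A minimal typed sketch of that contract (the TimeoutOption name and the TypeScript types are assumed for illustration; the bundled file itself is plain JavaScript):

  type TimeoutOption = number | { totalMs?: number; stepMs?: number };

  // Mirrors the compiled helper above: a bare number carries no per-step budget.
  function getStepTimeoutMs(timeout: TimeoutOption | undefined): number | undefined {
    if (timeout == null || typeof timeout === "number") {
      return undefined;
    }
    return timeout.stepMs;
  }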
@@ -909,7 +915,7 @@ import {
 } from "@ai-sdk/provider-utils";

 // src/version.ts
-var VERSION = true ? "6.0.21" : "0.0.0-test";
+var VERSION = true ? "6.0.23" : "0.0.0-test";

 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -3582,9 +3588,12 @@ async function generateText({
 const model = resolveLanguageModel(modelArg);
 const stopConditions = asArray(stopWhen);
 const totalTimeoutMs = getTotalTimeoutMs(timeout);
+const stepTimeoutMs = getStepTimeoutMs(timeout);
+const stepAbortController = stepTimeoutMs != null ? new AbortController() : void 0;
 const mergedAbortSignal = mergeAbortSignals(
 abortSignal,
-totalTimeoutMs != null ? AbortSignal.timeout(totalTimeoutMs) : void 0
+totalTimeoutMs != null ? AbortSignal.timeout(totalTimeoutMs) : void 0,
+stepAbortController == null ? void 0 : stepAbortController.signal
 );
 const { maxRetries, retry } = prepareRetries({
 maxRetries: maxRetriesArg,
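The hunk above derives the per-step budget next to the total one and merges a dedicated AbortController signal into mergedAbortSignal, so a step that overruns aborts the in-flight model call. A hedged usage sketch, assuming timeout is accepted as a generateText call option in the object form implied by the helpers (the model placeholder is not from the diff):

  import { generateText } from "ai";

  // Placeholder: any LanguageModel instance from a provider package.
  declare const model: Parameters<typeof generateText>[0]["model"];

  const result = await generateText({
    model,
    prompt: "Summarize the release notes.",
    // Assumed shape: abort the whole call after 60s, and any single step after 15s.
    timeout: { totalMs: 60_000, stepMs: 15_000 },
  });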
@@ -3713,263 +3722,270 @@ async function generateText({
 const steps = [];
 const pendingDeferredToolCalls = /* @__PURE__ */ new Map();
 do {
-const
-[old lines 3717-3737 not shown in this view]
-toolChoice:
-[old lines 3739-3747 not shown in this view]
-attributes: {
-[old lines 3749-3777 not shown in this view]
+const stepTimeoutId = stepTimeoutMs != null ? setTimeout(() => stepAbortController.abort(), stepTimeoutMs) : void 0;
+try {
+const stepInputMessages = [...initialMessages, ...responseMessages];
+const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
+model,
+steps,
+stepNumber: steps.length,
+messages: stepInputMessages,
+experimental_context
+}));
+const stepModel = resolveLanguageModel(
+(_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
+);
+const promptMessages = await convertToLanguageModelPrompt({
+prompt: {
+system: (_b = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _b : initialPrompt.system,
+messages: (_c = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _c : stepInputMessages
+},
+supportedUrls: await stepModel.supportedUrls,
+download: download2
+});
+experimental_context = (_d = prepareStepResult == null ? void 0 : prepareStepResult.experimental_context) != null ? _d : experimental_context;
+const { toolChoice: stepToolChoice, tools: stepTools } = await prepareToolsAndToolChoice({
+tools,
+toolChoice: (_e = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _e : toolChoice,
+activeTools: (_f = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _f : activeTools
+});
+currentModelResponse = await retry(
+() => {
+var _a17;
+return recordSpan({
+name: "ai.generateText.doGenerate",
+attributes: selectTelemetryAttributes({
+telemetry,
+attributes: {
+...assembleOperationName({
+operationId: "ai.generateText.doGenerate",
+telemetry
+}),
+...baseTelemetryAttributes,
+// model:
+"ai.model.provider": stepModel.provider,
+"ai.model.id": stepModel.modelId,
+// prompt:
+"ai.prompt.messages": {
+input: () => stringifyForTelemetry(promptMessages)
+},
+"ai.prompt.tools": {
+// convert the language model level tools:
+input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
+},
+"ai.prompt.toolChoice": {
+input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
+},
+// standardized gen-ai llm span attributes:
+"gen_ai.system": stepModel.provider,
+"gen_ai.request.model": stepModel.modelId,
+"gen_ai.request.frequency_penalty": settings.frequencyPenalty,
+"gen_ai.request.max_tokens": settings.maxOutputTokens,
+"gen_ai.request.presence_penalty": settings.presencePenalty,
+"gen_ai.request.stop_sequences": settings.stopSequences,
+"gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
+"gen_ai.request.top_k": settings.topK,
+"gen_ai.request.top_p": settings.topP
+}
+}),
+tracer,
+fn: async (span2) => {
+var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
+const stepProviderOptions = mergeObjects(
+providerOptions,
+prepareStepResult == null ? void 0 : prepareStepResult.providerOptions
+);
+const result = await stepModel.doGenerate({
+...callSettings2,
+tools: stepTools,
+toolChoice: stepToolChoice,
+responseFormat: await (output == null ? void 0 : output.responseFormat),
+prompt: promptMessages,
+providerOptions: stepProviderOptions,
+abortSignal: mergedAbortSignal,
+headers: headersWithUserAgent
+});
+const responseData = {
+id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId2(),
+timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : /* @__PURE__ */ new Date(),
+modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
+headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
+body: (_h2 = result.response) == null ? void 0 : _h2.body
+};
+span2.setAttributes(
+await selectTelemetryAttributes({
+telemetry,
+attributes: {
+"ai.response.finishReason": result.finishReason.unified,
+"ai.response.text": {
+output: () => extractTextContent(result.content)
+},
+"ai.response.toolCalls": {
+output: () => {
+const toolCalls = asToolCalls(result.content);
+return toolCalls == null ? void 0 : JSON.stringify(toolCalls);
+}
+},
+"ai.response.id": responseData.id,
+"ai.response.model": responseData.modelId,
+"ai.response.timestamp": responseData.timestamp.toISOString(),
+"ai.response.providerMetadata": JSON.stringify(
+result.providerMetadata
+),
+// TODO rename telemetry attributes to inputTokens and outputTokens
+"ai.usage.promptTokens": result.usage.inputTokens.total,
+"ai.usage.completionTokens": result.usage.outputTokens.total,
+// standardized gen-ai llm span attributes:
+"gen_ai.response.finish_reasons": [
+result.finishReason.unified
+],
+"gen_ai.response.id": responseData.id,
+"gen_ai.response.model": responseData.modelId,
+"gen_ai.usage.input_tokens": result.usage.inputTokens.total,
+"gen_ai.usage.output_tokens": result.usage.outputTokens.total
+}
+})
+);
+return { ...result, response: responseData };
 }
-})
-[old lines 3780-3814 not shown in this view]
-return toolCalls == null ? void 0 : JSON.stringify(toolCalls);
-}
-},
-"ai.response.id": responseData.id,
-"ai.response.model": responseData.modelId,
-"ai.response.timestamp": responseData.timestamp.toISOString(),
-"ai.response.providerMetadata": JSON.stringify(
-result.providerMetadata
-),
-// TODO rename telemetry attributes to inputTokens and outputTokens
-"ai.usage.promptTokens": result.usage.inputTokens.total,
-"ai.usage.completionTokens": result.usage.outputTokens.total,
-// standardized gen-ai llm span attributes:
-"gen_ai.response.finish_reasons": [
-result.finishReason.unified
-],
-"gen_ai.response.id": responseData.id,
-"gen_ai.response.model": responseData.modelId,
-"gen_ai.usage.input_tokens": result.usage.inputTokens.total,
-"gen_ai.usage.output_tokens": result.usage.outputTokens.total
-}
-})
-);
-return { ...result, response: responseData };
-}
-});
-}
-);
-const stepToolCalls = await Promise.all(
-currentModelResponse.content.filter(
-(part) => part.type === "tool-call"
-).map(
-(toolCall) => parseToolCall({
+});
+}
+);
+const stepToolCalls = await Promise.all(
+currentModelResponse.content.filter(
+(part) => part.type === "tool-call"
+).map(
+(toolCall) => parseToolCall({
+toolCall,
+tools,
+repairToolCall,
+system,
+messages: stepInputMessages
+})
+)
+);
+const toolApprovalRequests = {};
+for (const toolCall of stepToolCalls) {
+if (toolCall.invalid) {
+continue;
+}
+const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
+if (tool2 == null) {
+continue;
+}
+if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
+await tool2.onInputAvailable({
+input: toolCall.input,
+toolCallId: toolCall.toolCallId,
+messages: stepInputMessages,
+abortSignal: mergedAbortSignal,
+experimental_context
+});
+}
+if (await isApprovalNeeded({
+tool: tool2,
 toolCall,
-tools,
-repairToolCall,
-system,
-messages: stepInputMessages
-})
-)
-);
-const toolApprovalRequests = {};
-for (const toolCall of stepToolCalls) {
-if (toolCall.invalid) {
-continue;
-}
-const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
-if (tool2 == null) {
-continue;
-}
-if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
-await tool2.onInputAvailable({
-input: toolCall.input,
-toolCallId: toolCall.toolCallId,
 messages: stepInputMessages,
-abortSignal: mergedAbortSignal,
 experimental_context
-})
+})) {
+toolApprovalRequests[toolCall.toolCallId] = {
+type: "tool-approval-request",
+approvalId: generateId2(),
+toolCall
+};
+}
 }
-[old lines 3874-3882 not shown in this view]
-toolCall
-[old line 3884 not shown in this view]
+const invalidToolCalls = stepToolCalls.filter(
+(toolCall) => toolCall.invalid && toolCall.dynamic
+);
+clientToolOutputs = [];
+for (const toolCall of invalidToolCalls) {
+clientToolOutputs.push({
+type: "tool-error",
+toolCallId: toolCall.toolCallId,
+toolName: toolCall.toolName,
+input: toolCall.input,
+error: getErrorMessage5(toolCall.error),
+dynamic: true
+});
 }
-[old lines 3886-3887 not shown in this view]
-(toolCall) => toolCall.invalid && toolCall.dynamic
-);
-clientToolOutputs = [];
-for (const toolCall of invalidToolCalls) {
-clientToolOutputs.push({
-type: "tool-error",
-toolCallId: toolCall.toolCallId,
-toolName: toolCall.toolName,
-input: toolCall.input,
-error: getErrorMessage5(toolCall.error),
-dynamic: true
-});
-}
-clientToolCalls = stepToolCalls.filter(
-(toolCall) => !toolCall.providerExecuted
-);
-if (tools != null) {
-clientToolOutputs.push(
-...await executeTools({
-toolCalls: clientToolCalls.filter(
-(toolCall) => !toolCall.invalid && toolApprovalRequests[toolCall.toolCallId] == null
-),
-tools,
-tracer,
-telemetry,
-messages: stepInputMessages,
-abortSignal: mergedAbortSignal,
-experimental_context
-})
+clientToolCalls = stepToolCalls.filter(
+(toolCall) => !toolCall.providerExecuted
 );
-[old lines 3918-3925 not shown in this view]
+if (tools != null) {
+clientToolOutputs.push(
+...await executeTools({
+toolCalls: clientToolCalls.filter(
+(toolCall) => !toolCall.invalid && toolApprovalRequests[toolCall.toolCallId] == null
+),
+tools,
+tracer,
+telemetry,
+messages: stepInputMessages,
+abortSignal: mergedAbortSignal,
+experimental_context
+})
 );
-[old lines 3927-3930 not shown in this view]
+}
+for (const toolCall of stepToolCalls) {
+if (!toolCall.providerExecuted)
+continue;
+const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
+if ((tool2 == null ? void 0 : tool2.type) === "provider" && tool2.supportsDeferredResults) {
+const hasResultInResponse = currentModelResponse.content.some(
+(part) => part.type === "tool-result" && part.toolCallId === toolCall.toolCallId
+);
+if (!hasResultInResponse) {
+pendingDeferredToolCalls.set(toolCall.toolCallId, {
+toolName: toolCall.toolName
+});
+}
 }
 }
-[old lines 3933-3936 not shown in this view]
+for (const part of currentModelResponse.content) {
+if (part.type === "tool-result") {
+pendingDeferredToolCalls.delete(part.toolCallId);
+}
 }
-[old lines 3938-3942 not shown in this view]
-toolApprovalRequests: Object.values(toolApprovalRequests),
-tools
-});
-responseMessages.push(
-...await toResponseMessages({
-content: stepContent,
+const stepContent = asContent({
+content: currentModelResponse.content,
+toolCalls: stepToolCalls,
+toolOutputs: clientToolOutputs,
+toolApprovalRequests: Object.values(toolApprovalRequests),
 tools
-})
-[old lines 3951-3963 not shown in this view]
+});
+responseMessages.push(
+...await toResponseMessages({
+content: stepContent,
+tools
+})
+);
+const currentStepResult = new DefaultStepResult({
+content: stepContent,
+finishReason: currentModelResponse.finishReason.unified,
+rawFinishReason: currentModelResponse.finishReason.raw,
+usage: asLanguageModelUsage(currentModelResponse.usage),
+warnings: currentModelResponse.warnings,
+providerMetadata: currentModelResponse.providerMetadata,
+request: (_g = currentModelResponse.request) != null ? _g : {},
+response: {
+...currentModelResponse.response,
+// deep clone msgs to avoid mutating past messages in multi-step:
+messages: structuredClone(responseMessages)
+}
+});
+logWarnings({
+warnings: (_h = currentModelResponse.warnings) != null ? _h : [],
+provider: stepModel.provider,
+model: stepModel.modelId
+});
+steps.push(currentStepResult);
+await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
+} finally {
+if (stepTimeoutId != null) {
+clearTimeout(stepTimeoutId);
 }
-}
-logWarnings({
-warnings: (_h = currentModelResponse.warnings) != null ? _h : [],
-provider: stepModel.provider,
-model: stepModel.modelId
-});
-steps.push(currentStepResult);
-await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
+}
 } while (
 // Continue if:
 // 1. There are client tool calls that have all been executed, OR
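Most of this hunk is re-indentation: the existing step body moves into a try block so that the per-step timer armed at the top of each iteration is always cleared in finally, even when the step throws or the loop exits early. Stripped of the generateText specifics, the control flow reduces to roughly this (simplified, illustrative names):

  async function runWithStepTimeout(
    stepTimeoutMs: number | undefined,
    runStep: (signal?: AbortSignal) => Promise<boolean>
  ) {
    // One controller for the whole call; aborting it cancels the in-flight step.
    const stepAbortController = stepTimeoutMs != null ? new AbortController() : undefined;
    let hasMore = true;
    while (hasMore) {
      // Arm the timer just before the step starts ...
      const stepTimeoutId = stepTimeoutMs != null
        ? setTimeout(() => stepAbortController!.abort(), stepTimeoutMs)
        : undefined;
      try {
        hasMore = await runStep(stepAbortController?.signal);
      } finally {
        // ... and always disarm it when the step settles.
        if (stepTimeoutId != null) {
          clearTimeout(stepTimeoutId);
        }
      }
    }
  }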
@@ -5672,14 +5688,12 @@ function streamText({
 onAbort,
 onStepFinish,
 experimental_context,
-_internal: {
-now: now2 = now,
-generateId: generateId2 = originalGenerateId2,
-currentDate = () => /* @__PURE__ */ new Date()
-} = {},
+_internal: { now: now2 = now, generateId: generateId2 = originalGenerateId2 } = {},
 ...settings
 }) {
 const totalTimeoutMs = getTotalTimeoutMs(timeout);
+const stepTimeoutMs = getStepTimeoutMs(timeout);
+const stepAbortController = stepTimeoutMs != null ? new AbortController() : void 0;
 return new DefaultStreamTextResult({
 model: resolveLanguageModel(model),
 telemetry,
@@ -5688,8 +5702,11 @@ function streamText({
 maxRetries,
 abortSignal: mergeAbortSignals(
 abortSignal,
-totalTimeoutMs != null ? AbortSignal.timeout(totalTimeoutMs) : void 0
+totalTimeoutMs != null ? AbortSignal.timeout(totalTimeoutMs) : void 0,
+stepAbortController == null ? void 0 : stepAbortController.signal
 ),
+stepTimeoutMs,
+stepAbortController,
 system,
 prompt,
 messages,
@@ -5709,7 +5726,6 @@ function streamText({
 onAbort,
 onStepFinish,
 now: now2,
-currentDate,
 generateId: generateId2,
 experimental_context,
 download: download2
@@ -5785,6 +5801,8 @@ var DefaultStreamTextResult = class {
 settings,
 maxRetries: maxRetriesArg,
 abortSignal,
+stepTimeoutMs,
+stepAbortController,
 system,
 prompt,
 messages,
@@ -5799,7 +5817,6 @@ var DefaultStreamTextResult = class {
 prepareStep,
 includeRawChunks,
 now: now2,
-currentDate,
 generateId: generateId2,
 onChunk,
 onError,
@@ -6251,6 +6268,7 @@ var DefaultStreamTextResult = class {
 }) {
 var _a16, _b, _c, _d, _e, _f;
 const includeRawChunks2 = self.includeRawChunks;
+const stepTimeoutId = stepTimeoutMs != null ? setTimeout(() => stepAbortController.abort(), stepTimeoutMs) : void 0;
 stepFinish = new DelayedPromise();
 const stepInputMessages = [...initialMessages, ...responseMessages];
 const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
@@ -6366,7 +6384,7 @@ var DefaultStreamTextResult = class {
 let stepFirstChunk = true;
 let stepResponse = {
 id: generateId2(),
-timestamp:
+timestamp: /* @__PURE__ */ new Date(),
 modelId: model.modelId
 };
 let activeText = "";
@@ -6605,6 +6623,9 @@ var DefaultStreamTextResult = class {
 pendingDeferredToolCalls.delete(output2.toolCallId);
 }
 }
+if (stepTimeoutId != null) {
+clearTimeout(stepTimeoutId);
+}
 if (
 // Continue if:
 // 1. There are client tool calls that have all been executed, OR
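The streamText hunks mirror the generateText change: the same stepTimeoutMs and stepAbortController pair is threaded into DefaultStreamTextResult, the timer is armed at the start of each streamed step and cleared once that step's stream settles, and the unused currentDate internal hook is dropped in favor of new Date(). From the caller's side the option surface appears to match generateText (placeholders as before; onAbort firing on a step timeout is an assumption, not shown in this diff):

  import { streamText } from "ai";

  declare const model: Parameters<typeof streamText>[0]["model"];

  const result = streamText({
    model,
    prompt: "Write a haiku about timeouts.",
    timeout: { totalMs: 120_000, stepMs: 20_000 },
    // Assumed: the merged abort signal fires if one step exceeds 20s.
    onAbort: () => console.warn("stream aborted"),
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }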
@@ -10188,7 +10209,16 @@ function smoothStream({
 _internal: { delay: delay2 = originalDelay } = {}
 } = {}) {
 let detectChunk;
-if (typeof chunking === "function") {
+if (chunking != null && typeof chunking === "object" && "segment" in chunking && typeof chunking.segment === "function") {
+const segmenter = chunking;
+detectChunk = (buffer) => {
+if (buffer.length === 0)
+return null;
+const iterator = segmenter.segment(buffer)[Symbol.iterator]();
+const first = iterator.next().value;
+return (first == null ? void 0 : first.segment) || null;
+};
+} else if (typeof chunking === "function") {
 detectChunk = (buffer) => {
 const match = chunking(buffer);
 if (match == null) {
@@ -10205,11 +10235,11 @@ function smoothStream({
 return match;
 };
 } else {
-const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
+const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking instanceof RegExp ? chunking : void 0;
 if (chunkingRegex == null) {
 throw new InvalidArgumentError2({
 argument: "chunking",
-message: `Chunking must be "word"
+message: `Chunking must be "word", "line", a RegExp, an Intl.Segmenter, or a ChunkDetector function. Received: ${chunking}`
 });
 }
 detectChunk = (buffer) => {
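These two smoothStream hunks add a new chunking mode: any object exposing a segment() method, which is the shape of Intl.Segmenter and is what the updated error message names, is detected before the existing function and RegExp branches, and the first segment of the buffered text becomes the next chunk. The else branch now also only accepts real RegExp instances, so other object values fall through to the clearer error. A hedged usage sketch:

  import { smoothStream } from "ai";

  // Emits one sentence at a time; "word", "line", a RegExp, or a custom
  // ChunkDetector function keep working as before.
  const sentenceSmoothing = smoothStream({
    chunking: new Intl.Segmenter("en", { granularity: "sentence" }),
  });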
@@ -12086,6 +12116,7 @@ export {
 generateObject,
 generateText,
 getStaticToolName,
+getStepTimeoutMs,
 getTextFromDataUrl,
 getToolName,
 getToolOrDynamicToolName,
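Finally, the export hunk re-exports the new helper from the package entry point next to the other get* utilities, so callers can inspect the per-step budget the same way the bundle does internally:

  import { getStepTimeoutMs } from "ai";

  getStepTimeoutMs({ totalMs: 60_000, stepMs: 10_000 }); // 10000
  getStepTimeoutMs(60_000); // undefined: a bare number configures only the total timeout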