@ai-sdk/anthropic 3.0.2 → 3.0.4
This diff represents the content of publicly available package versions released to one of the supported registries; it is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +36 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +36 -28
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +35 -27
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +35 -27
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -959,7 +959,8 @@ var webSearch_20250305InputSchema = lazySchema4(
 var factory2 = createProviderToolFactoryWithOutputSchema({
 id: "anthropic.web_search_20250305",
 inputSchema: webSearch_20250305InputSchema,
-outputSchema: webSearch_20250305OutputSchema
+outputSchema: webSearch_20250305OutputSchema,
+supportsDeferredResults: true
 });
 var webSearch_20250305 = (args = {}) => {
 return factory2(args);
@@ -1019,7 +1020,8 @@ var webFetch_20250910InputSchema = lazySchema5(
 var factory3 = createProviderToolFactoryWithOutputSchema2({
 id: "anthropic.web_fetch_20250910",
 inputSchema: webFetch_20250910InputSchema,
-outputSchema: webFetch_20250910OutputSchema
+outputSchema: webFetch_20250910OutputSchema,
+supportsDeferredResults: true
 });
 var webFetch_20250910 = (args = {}) => {
 return factory3(args);
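Both hunks above make the same change: the web search and web fetch provider tool factories now set supportsDeferredResults: true next to their output schemas, presumably so results for these server-executed tools can arrive in a later block rather than immediately with the call. As a rough usage sketch of the tools these factories back (the anthropic.tools.* helpers, the maxUses option, and the example model id come from the AI SDK's documented surface, not from this diff, so treat them as assumptions):

// Usage sketch (assumptions: the anthropic.tools.* helpers, the maxUses
// option, and the example model id are not part of this diff).
import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const { text } = await generateText({
  model: anthropic("claude-sonnet-4-5"),
  tools: {
    web_search: anthropic.tools.webSearch_20250305({ maxUses: 3 }),
    web_fetch: anthropic.tools.webFetch_20250910({ maxUses: 1 }),
  },
  prompt: "Find the latest AI SDK release notes and summarize them.",
});
console.log(text);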
@@ -2719,7 +2721,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 }
 async doGenerate(options) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
 const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
 ...options,
 stream: false,
@@ -2743,6 +2745,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 const content = [];
 const mcpToolCalls = {};
+const serverToolCalls = {};
 let isJsonResponseFromTool = false;
 for (const part of response.content) {
 switch (part.type) {
@@ -2837,6 +2840,7 @@ var AnthropicMessagesLanguageModel = class {
 providerExecuted: true
 });
 } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+serverToolCalls[part.id] = part.name;
 content.push({
 type: "tool-call",
 toolCallId: part.id,
@@ -3000,11 +3004,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 // tool search tool results:
 case "tool_search_tool_result": {
+const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
 if (part.content.type === "tool_search_tool_search_result") {
 content.push({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 result: part.content.tool_references.map((ref) => ({
 type: ref.type,
 toolName: ref.tool_name
@@ -3014,7 +3019,7 @@ var AnthropicMessagesLanguageModel = class {
 content.push({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 isError: true,
 result: {
 type: "tool_search_tool_result_error",
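The serverToolCalls map introduced in these hunks records, per call id, whether a server tool call came from tool_search_tool_regex or tool_search_tool_bm25, so the later tool_search_tool_result block can be reported under the correct tool name instead of an unconditional default. A minimal self-contained sketch of that bookkeeping (the sample parts are made up for illustration, and ?? stands in for the compiled != null checks):

// Record the tool name when the server tool call is seen.
const serverToolCalls = {};

function recordServerToolCall(part) {
  // part.name is "tool_search_tool_regex" or "tool_search_tool_bm25"
  serverToolCalls[part.id] = part.name;
}

// Resolve the name when the matching result block arrives; fall back to the
// regex variant when the call id was never recorded, as the patched code does.
function resolveProviderToolName(part) {
  return serverToolCalls[part.tool_use_id] ?? "tool_search_tool_regex";
}

recordServerToolCall({ id: "toolu_01", name: "tool_search_tool_bm25" });
console.log(resolveProviderToolName({ tool_use_id: "toolu_01" })); // "tool_search_tool_bm25"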
@@ -3033,13 +3038,13 @@ var AnthropicMessagesLanguageModel = class {
 finishReason: response.stop_reason,
 isJsonResponseFromTool
 }),
-raw: (
+raw: (_d = response.stop_reason) != null ? _d : void 0
 },
 usage: convertAnthropicMessagesUsage(response.usage),
 request: { body: args },
 response: {
-id: (
-modelId: (
+id: (_e = response.id) != null ? _e : void 0,
+modelId: (_f = response.model) != null ? _f : void 0,
 headers: responseHeaders,
 body: rawResponse
 },
@@ -3047,20 +3052,20 @@ var AnthropicMessagesLanguageModel = class {
 providerMetadata: {
 anthropic: {
 usage: response.usage,
-cacheCreationInputTokens: (
-stopSequence: (
+cacheCreationInputTokens: (_g = response.usage.cache_creation_input_tokens) != null ? _g : null,
+stopSequence: (_h = response.stop_sequence) != null ? _h : null,
 container: response.container ? {
 expiresAt: response.container.expires_at,
 id: response.container.id,
-skills: (
+skills: (_j = (_i = response.container.skills) == null ? void 0 : _i.map((skill) => ({
 type: skill.type,
 skillId: skill.skill_id,
 version: skill.version
-}))) != null ?
+}))) != null ? _j : null
 } : null,
-contextManagement: (
+contextManagement: (_k = mapAnthropicResponseContextManagement(
 response.context_management
-)) != null ?
+)) != null ? _k : null
 }
 }
 };
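Much of the remaining churn in doGenerate is mechanical: expressions of the shape (_d = expr) != null ? _d : fallback are the bundler's down-leveled spelling of nullish coalescing (expr ?? fallback), and the new providerToolName lookup now consumes _c, so the later temporaries shift by one through the added _k even where the underlying expression did not change. A small demo of the equivalence (the sample response object is illustrative):

// Both forms evaluate to the same value; the second is what a bundler emits
// when targeting runtimes without the ?? operator.
const response = { stop_reason: null }; // illustrative sample, not from the diff
var _d;
const modern = response.stop_reason ?? undefined;
const lowered = (_d = response.stop_reason) != null ? _d : void 0;
console.log(modern === lowered); // true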
@@ -3103,6 +3108,7 @@ var AnthropicMessagesLanguageModel = class {
 };
 const contentBlocks = {};
 const mcpToolCalls = {};
+const serverToolCalls = {};
 let contextManagement = null;
 let rawUsage = void 0;
 let cacheCreationInputTokens = null;
@@ -3117,7 +3123,7 @@ var AnthropicMessagesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }
@@ -3229,6 +3235,7 @@ var AnthropicMessagesLanguageModel = class {
 providerExecuted: true
 });
 } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+serverToolCalls[part.id] = part.name;
 const customToolName = toolNameMapping.toCustomToolName(
 part.name
 );
@@ -3373,11 +3380,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 // tool search tool results:
 case "tool_search_tool_result": {
+const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
 if (part.content.type === "tool_search_tool_search_result") {
 controller.enqueue({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 result: part.content.tool_references.map((ref) => ({
 type: ref.type,
 toolName: ref.tool_name
@@ -3387,7 +3395,7 @@ var AnthropicMessagesLanguageModel = class {
 controller.enqueue({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 isError: true,
 result: {
 type: "tool_search_tool_result_error",
@@ -3586,12 +3594,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 case "message_start": {
 usage.input_tokens = value.message.usage.input_tokens;
-usage.cache_read_input_tokens = (
-usage.cache_creation_input_tokens = (
+usage.cache_read_input_tokens = (_d = value.message.usage.cache_read_input_tokens) != null ? _d : 0;
+usage.cache_creation_input_tokens = (_e = value.message.usage.cache_creation_input_tokens) != null ? _e : 0;
 rawUsage = {
 ...value.message.usage
 };
-cacheCreationInputTokens = (
+cacheCreationInputTokens = (_f = value.message.usage.cache_creation_input_tokens) != null ? _f : null;
 if (value.message.container != null) {
 container = {
 expiresAt: value.message.container.expires_at,
@@ -3610,8 +3618,8 @@ var AnthropicMessagesLanguageModel = class {
 }
 controller.enqueue({
 type: "response-metadata",
-id: (
-modelId: (
+id: (_g = value.message.id) != null ? _g : void 0,
+modelId: (_h = value.message.model) != null ? _h : void 0
 });
 if (value.message.content != null) {
 for (let contentIndex = 0; contentIndex < value.message.content.length; contentIndex++) {
@@ -3627,7 +3635,7 @@ var AnthropicMessagesLanguageModel = class {
 id: part.id,
 toolName: part.name
 });
-const inputStr = JSON.stringify((
+const inputStr = JSON.stringify((_i = part.input) != null ? _i : {});
 controller.enqueue({
 type: "tool-input-delta",
 id: part.id,
@@ -3662,17 +3670,17 @@ var AnthropicMessagesLanguageModel = class {
 finishReason: value.delta.stop_reason,
 isJsonResponseFromTool
 }),
-raw: (
+raw: (_j = value.delta.stop_reason) != null ? _j : void 0
 };
-stopSequence = (
+stopSequence = (_k = value.delta.stop_sequence) != null ? _k : null;
 container = value.delta.container != null ? {
 expiresAt: value.delta.container.expires_at,
 id: value.delta.container.id,
-skills: (
+skills: (_m = (_l = value.delta.container.skills) == null ? void 0 : _l.map((skill) => ({
 type: skill.type,
 skillId: skill.skill_id,
 version: skill.version
-}))) != null ?
+}))) != null ? _m : null
 } : null;
 if (value.delta.context_management) {
 contextManagement = mapAnthropicResponseContextManagement(