@ai-sdk/anthropic 3.0.2 → 3.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +36 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +36 -28
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +35 -27
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +35 -27
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.js
CHANGED

@@ -966,7 +966,8 @@ var webSearch_20250305InputSchema = (0, import_provider_utils5.lazySchema)(
 var factory2 = (0, import_provider_utils5.createProviderToolFactoryWithOutputSchema)({
   id: "anthropic.web_search_20250305",
   inputSchema: webSearch_20250305InputSchema,
-  outputSchema: webSearch_20250305OutputSchema
+  outputSchema: webSearch_20250305OutputSchema,
+  supportsDeferredResults: true
 });
 var webSearch_20250305 = (args = {}) => {
   return factory2(args);

@@ -1022,7 +1023,8 @@ var webFetch_20250910InputSchema = (0, import_provider_utils6.lazySchema)(
 var factory3 = (0, import_provider_utils6.createProviderToolFactoryWithOutputSchema)({
   id: "anthropic.web_fetch_20250910",
   inputSchema: webFetch_20250910InputSchema,
-  outputSchema: webFetch_20250910OutputSchema
+  outputSchema: webFetch_20250910OutputSchema,
+  supportsDeferredResults: true
 });
 var webFetch_20250910 = (args = {}) => {
   return factory3(args);
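
Both provider-executed tools now declare `supportsDeferredResults: true` in their factory definitions, which by its name tells the shared provider-tool helpers that result parts for these tools may arrive separately from the call parts. Nothing changes in how the tools are configured from application code; a minimal sketch, assuming the factories are still exposed as `anthropic.tools.*` as in earlier releases, with illustrative option values:

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

// Configuration is unchanged by this release; the supportsDeferredResults
// flag only affects how the provider wires up the tools' result parts.
const { text } = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  tools: {
    web_search: anthropic.tools.webSearch_20250305({ maxUses: 3 }),
    web_fetch: anthropic.tools.webFetch_20250910({ maxUses: 1 }),
  },
  prompt: 'Fetch the latest @ai-sdk/anthropic changelog and summarize it.',
});
```
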
@@ -2703,7 +2705,7 @@ var AnthropicMessagesLanguageModel = class {
     });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
     const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
       ...options,
       stream: false,

@@ -2727,6 +2729,7 @@ var AnthropicMessagesLanguageModel = class {
     });
     const content = [];
     const mcpToolCalls = {};
+    const serverToolCalls = {};
     let isJsonResponseFromTool = false;
     for (const part of response.content) {
       switch (part.type) {

@@ -2821,6 +2824,7 @@ var AnthropicMessagesLanguageModel = class {
             providerExecuted: true
           });
         } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+          serverToolCalls[part.id] = part.name;
           content.push({
             type: "tool-call",
             toolCallId: part.id,

@@ -2984,11 +2988,12 @@ var AnthropicMessagesLanguageModel = class {
         }
         // tool search tool results:
         case "tool_search_tool_result": {
+          const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
           if (part.content.type === "tool_search_tool_search_result") {
             content.push({
               type: "tool-result",
               toolCallId: part.tool_use_id,
-              toolName: toolNameMapping.toCustomToolName(
+              toolName: toolNameMapping.toCustomToolName(providerToolName),
               result: part.content.tool_references.map((ref) => ({
                 type: ref.type,
                 toolName: ref.tool_name

@@ -2998,7 +3003,7 @@ var AnthropicMessagesLanguageModel = class {
             content.push({
               type: "tool-result",
               toolCallId: part.tool_use_id,
-              toolName: toolNameMapping.toCustomToolName(
+              toolName: toolNameMapping.toCustomToolName(providerToolName),
               isError: true,
               result: {
                 type: "tool_search_tool_result_error",
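
The substantive change in `doGenerate` is the new `serverToolCalls` map: when a `tool_search_tool_regex` or `tool_search_tool_bm25` call is emitted, its id and tool name are recorded, and when the matching `tool_search_tool_result` block arrives, the recorded name is passed through `toolNameMapping.toCustomToolName`, with `tool_search_tool_regex` as the fallback when the call was not seen. A stripped-down sketch of the pattern, with hypothetical helper names:

```ts
// Sketch of the fix, not the bundled code itself: track the tool-search
// variant per server tool call id, then reuse it when only the call id
// (tool_use_id) is available on the result block.
type ToolSearchVariant = 'tool_search_tool_regex' | 'tool_search_tool_bm25';

const serverToolCalls: Record<string, ToolSearchVariant> = {};

function onServerToolCall(id: string, name: ToolSearchVariant): void {
  serverToolCalls[id] = name; // remember which variant issued this call
}

function resolveToolSearchResultName(toolUseId: string): ToolSearchVariant {
  // Fall back to the regex variant when the call was never recorded.
  return serverToolCalls[toolUseId] ?? 'tool_search_tool_regex';
}

// A BM25 call now maps its result back to the BM25 tool name (id illustrative):
onServerToolCall('srvtoolu_01', 'tool_search_tool_bm25');
console.log(resolveToolSearchResultName('srvtoolu_01')); // "tool_search_tool_bm25"
```

The same bookkeeping is added to the streaming `transform` further down, so regex and BM25 tool-search results each resolve to the tool name the caller registered.
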
@@ -3017,13 +3022,13 @@ var AnthropicMessagesLanguageModel = class {
         finishReason: response.stop_reason,
         isJsonResponseFromTool
       }),
-      raw: (
+      raw: (_d = response.stop_reason) != null ? _d : void 0
     },
     usage: convertAnthropicMessagesUsage(response.usage),
     request: { body: args },
     response: {
-      id: (
-      modelId: (
+      id: (_e = response.id) != null ? _e : void 0,
+      modelId: (_f = response.model) != null ? _f : void 0,
       headers: responseHeaders,
       body: rawResponse
     },

@@ -3031,20 +3036,20 @@ var AnthropicMessagesLanguageModel = class {
     providerMetadata: {
       anthropic: {
         usage: response.usage,
-        cacheCreationInputTokens: (
-        stopSequence: (
+        cacheCreationInputTokens: (_g = response.usage.cache_creation_input_tokens) != null ? _g : null,
+        stopSequence: (_h = response.stop_sequence) != null ? _h : null,
         container: response.container ? {
           expiresAt: response.container.expires_at,
           id: response.container.id,
-          skills: (
+          skills: (_j = (_i = response.container.skills) == null ? void 0 : _i.map((skill) => ({
             type: skill.type,
             skillId: skill.skill_id,
             version: skill.version
-          }))) != null ?
+          }))) != null ? _j : null
         } : null,
-        contextManagement: (
+        contextManagement: (_k = mapAnthropicResponseContextManagement(
           response.context_management
-        )) != null ?
+        )) != null ? _k : null
       }
     }
   };
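
The rest of this hunk is mechanical churn: the new `serverToolCalls` lookup earlier in `doGenerate` consumes one more of the compiler-generated temporaries, so the later placeholders (`_d` through `_k`) each shift by one while the expressions they guard stay the same. For reference, a sketch (assumed, based on typical esbuild output) of how a source-level `?? null` maps onto these temporaries:

```ts
// The _g/_h/... changes in this diff are only a renumbering of these
// compiler-generated temporaries; the guarded expressions are unchanged.
const response: { stop_sequence?: string | null } = { stop_sequence: null };

// Source-level form used throughout the provider:
const fromSource = response.stop_sequence ?? null;

// Roughly the shape the bundler emits into dist/ for the same expression:
var _h;
const fromBundle = (_h = response.stop_sequence) != null ? _h : null;

console.log(fromSource === fromBundle); // true
```
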
@@ -3087,6 +3092,7 @@ var AnthropicMessagesLanguageModel = class {
     };
     const contentBlocks = {};
     const mcpToolCalls = {};
+    const serverToolCalls = {};
     let contextManagement = null;
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;

@@ -3101,7 +3107,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+        var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }

@@ -3213,6 +3219,7 @@ var AnthropicMessagesLanguageModel = class {
               providerExecuted: true
             });
           } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+            serverToolCalls[part.id] = part.name;
             const customToolName = toolNameMapping.toCustomToolName(
               part.name
             );

@@ -3357,11 +3364,12 @@ var AnthropicMessagesLanguageModel = class {
           }
           // tool search tool results:
           case "tool_search_tool_result": {
+            const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
             if (part.content.type === "tool_search_tool_search_result") {
               controller.enqueue({
                 type: "tool-result",
                 toolCallId: part.tool_use_id,
-                toolName: toolNameMapping.toCustomToolName(
+                toolName: toolNameMapping.toCustomToolName(providerToolName),
                 result: part.content.tool_references.map((ref) => ({
                   type: ref.type,
                   toolName: ref.tool_name

@@ -3371,7 +3379,7 @@ var AnthropicMessagesLanguageModel = class {
               controller.enqueue({
                 type: "tool-result",
                 toolCallId: part.tool_use_id,
-                toolName: toolNameMapping.toCustomToolName(
+                toolName: toolNameMapping.toCustomToolName(providerToolName),
                 isError: true,
                 result: {
                   type: "tool_search_tool_result_error",

@@ -3570,12 +3578,12 @@ var AnthropicMessagesLanguageModel = class {
          }
          case "message_start": {
            usage.input_tokens = value.message.usage.input_tokens;
-            usage.cache_read_input_tokens = (
-            usage.cache_creation_input_tokens = (
+            usage.cache_read_input_tokens = (_d = value.message.usage.cache_read_input_tokens) != null ? _d : 0;
+            usage.cache_creation_input_tokens = (_e = value.message.usage.cache_creation_input_tokens) != null ? _e : 0;
            rawUsage = {
              ...value.message.usage
            };
-            cacheCreationInputTokens = (
+            cacheCreationInputTokens = (_f = value.message.usage.cache_creation_input_tokens) != null ? _f : null;
            if (value.message.container != null) {
              container = {
                expiresAt: value.message.container.expires_at,

@@ -3594,8 +3602,8 @@ var AnthropicMessagesLanguageModel = class {
            }
            controller.enqueue({
              type: "response-metadata",
-              id: (
-              modelId: (
+              id: (_g = value.message.id) != null ? _g : void 0,
+              modelId: (_h = value.message.model) != null ? _h : void 0
            });
            if (value.message.content != null) {
              for (let contentIndex = 0; contentIndex < value.message.content.length; contentIndex++) {

@@ -3611,7 +3619,7 @@ var AnthropicMessagesLanguageModel = class {
                  id: part.id,
                  toolName: part.name
                });
-                const inputStr = JSON.stringify((
+                const inputStr = JSON.stringify((_i = part.input) != null ? _i : {});
                controller.enqueue({
                  type: "tool-input-delta",
                  id: part.id,

@@ -3646,17 +3654,17 @@ var AnthropicMessagesLanguageModel = class {
                finishReason: value.delta.stop_reason,
                isJsonResponseFromTool
              }),
-              raw: (
+              raw: (_j = value.delta.stop_reason) != null ? _j : void 0
            };
-            stopSequence = (
+            stopSequence = (_k = value.delta.stop_sequence) != null ? _k : null;
            container = value.delta.container != null ? {
              expiresAt: value.delta.container.expires_at,
              id: value.delta.container.id,
-              skills: (
+              skills: (_m = (_l = value.delta.container.skills) == null ? void 0 : _l.map((skill) => ({
                type: skill.type,
                skillId: skill.skill_id,
                version: skill.version
-              }))) != null ?
+              }))) != null ? _m : null
            } : null;
            if (value.delta.context_management) {
              contextManagement = mapAnthropicResponseContextManagement(