@ai-sdk/anthropic 3.0.3 → 3.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +32 -26
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +32 -26
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +31 -25
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +31 -25
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils22 = require("@ai-sdk/provider-utils");

 // src/version.ts
-var VERSION = true ? "3.0.3" : "0.0.0-test";
+var VERSION = true ? "3.0.4" : "0.0.0-test";

 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
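The only functional change in this first hunk is the version literal baked into the bundle. A plausible source-level shape for src/version.ts is sketched below; this is an assumption (the build-time flag name is invented), since only the "3.0.4" and "0.0.0-test" strings are visible in the bundled output:

    // Hypothetical reconstruction of src/version.ts; in the published bundle the
    // condition is already folded to `true`, leaving "3.0.4" as the live branch.
    declare const __RELEASE_BUILD__: boolean; // assumed build-time define, not confirmed

    export const VERSION = __RELEASE_BUILD__ ? '3.0.4' : '0.0.0-test';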
@@ -2713,7 +2713,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 }
 async doGenerate(options) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
 const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
 ...options,
 stream: false,
@@ -2737,6 +2737,7 @@ var AnthropicMessagesLanguageModel = class {
 });
 const content = [];
 const mcpToolCalls = {};
+const serverToolCalls = {};
 let isJsonResponseFromTool = false;
 for (const part of response.content) {
 switch (part.type) {
@@ -2831,6 +2832,7 @@ var AnthropicMessagesLanguageModel = class {
 providerExecuted: true
 });
 } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+serverToolCalls[part.id] = part.name;
 content.push({
 type: "tool-call",
 toolCallId: part.id,
@@ -2994,11 +2996,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 // tool search tool results:
 case "tool_search_tool_result": {
+const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
 if (part.content.type === "tool_search_tool_search_result") {
 content.push({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 result: part.content.tool_references.map((ref) => ({
 type: ref.type,
 toolName: ref.tool_name
@@ -3008,7 +3011,7 @@ var AnthropicMessagesLanguageModel = class {
 content.push({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 isError: true,
 result: {
 type: "tool_search_tool_result_error",
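Taken together, the additions above implement a small id-to-name map in doGenerate: when a tool_search_tool_regex or tool_search_tool_bm25 server tool call is seen, its provider tool name is recorded under part.id, and when the matching tool_search_tool_result block arrives (success or error branch) the name is looked up again by part.tool_use_id, falling back to tool_search_tool_regex. A minimal TypeScript sketch of that pattern in isolation (the part shapes here are simplified stand-ins, not the SDK's real types):

    // Minimal sketch, not the SDK's real types: record the server tool name by
    // call id, then resolve it again when the matching result block arrives.
    const serverToolCalls: Record<string, string> = {};

    function recordServerToolCall(part: { id: string; name: string }) {
      if (
        part.name === 'tool_search_tool_regex' ||
        part.name === 'tool_search_tool_bm25'
      ) {
        serverToolCalls[part.id] = part.name;
      }
    }

    function resolveProviderToolName(part: { tool_use_id: string }): string {
      // Falls back to the regex variant when the call was not seen, exactly as the
      // bundled `!= null ? _c : "tool_search_tool_regex"` expression does.
      return serverToolCalls[part.tool_use_id] ?? 'tool_search_tool_regex';
    }

The resolved name is then passed through toolNameMapping.toCustomToolName, so the result part ends up with the same custom tool name as the tool-call part that produced it.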
@@ -3027,13 +3030,13 @@ var AnthropicMessagesLanguageModel = class {
 finishReason: response.stop_reason,
 isJsonResponseFromTool
 }),
-raw: (
+raw: (_d = response.stop_reason) != null ? _d : void 0
 },
 usage: convertAnthropicMessagesUsage(response.usage),
 request: { body: args },
 response: {
-id: (
-modelId: (
+id: (_e = response.id) != null ? _e : void 0,
+modelId: (_f = response.model) != null ? _f : void 0,
 headers: responseHeaders,
 body: rawResponse
 },
@@ -3041,20 +3044,20 @@ var AnthropicMessagesLanguageModel = class {
 providerMetadata: {
 anthropic: {
 usage: response.usage,
-cacheCreationInputTokens: (
-stopSequence: (
+cacheCreationInputTokens: (_g = response.usage.cache_creation_input_tokens) != null ? _g : null,
+stopSequence: (_h = response.stop_sequence) != null ? _h : null,
 container: response.container ? {
 expiresAt: response.container.expires_at,
 id: response.container.id,
-skills: (
+skills: (_j = (_i = response.container.skills) == null ? void 0 : _i.map((skill) => ({
 type: skill.type,
 skillId: skill.skill_id,
 version: skill.version
-}))) != null ?
+}))) != null ? _j : null
 } : null,
-contextManagement: (
+contextManagement: (_k = mapAnthropicResponseContextManagement(
 response.context_management
-)) != null ?
+)) != null ? _k : null
 }
 }
 };
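The rest of the doGenerate hunk only re-expands the down-leveled `?? null` / `?? void 0` fallbacks with the shifted temporaries (_d through _k). A hedged usage sketch for reading the fields this code populates, assuming the standard AI SDK providerMetadata surface on a generateText result; the field names are the ones constructed above:

    import { generateText } from 'ai';
    import { anthropic } from '@ai-sdk/anthropic';

    // Sketch only; assumes the usual result.providerMetadata shape from the AI SDK.
    const result = await generateText({
      model: anthropic('claude-sonnet-4-5'),
      prompt: 'Summarize the attached report.',
    });

    const meta = result.providerMetadata?.anthropic;
    if (meta != null) {
      // Each of these is set to null (not undefined) by the bundle when absent.
      console.log('cacheCreationInputTokens:', meta.cacheCreationInputTokens);
      console.log('stopSequence:', meta.stopSequence);
      console.log('container:', meta.container);
      console.log('contextManagement:', meta.contextManagement);
    }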
@@ -3097,6 +3100,7 @@ var AnthropicMessagesLanguageModel = class {
 };
 const contentBlocks = {};
 const mcpToolCalls = {};
+const serverToolCalls = {};
 let contextManagement = null;
 let rawUsage = void 0;
 let cacheCreationInputTokens = null;
@@ -3111,7 +3115,7 @@ var AnthropicMessagesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }
@@ -3223,6 +3227,7 @@ var AnthropicMessagesLanguageModel = class {
 providerExecuted: true
 });
 } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+serverToolCalls[part.id] = part.name;
 const customToolName = toolNameMapping.toCustomToolName(
 part.name
 );
@@ -3367,11 +3372,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 // tool search tool results:
 case "tool_search_tool_result": {
+const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
 if (part.content.type === "tool_search_tool_search_result") {
 controller.enqueue({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 result: part.content.tool_references.map((ref) => ({
 type: ref.type,
 toolName: ref.tool_name
@@ -3381,7 +3387,7 @@ var AnthropicMessagesLanguageModel = class {
 controller.enqueue({
 type: "tool-result",
 toolCallId: part.tool_use_id,
-toolName: toolNameMapping.toCustomToolName(
+toolName: toolNameMapping.toCustomToolName(providerToolName),
 isError: true,
 result: {
 type: "tool_search_tool_result_error",
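The streaming path mirrors the non-streaming one: the serverToolCalls bookkeeping and the tool-name lookup are identical, and the extra temporaries (_m plus the shifted _c…_l) are just esbuild's down-leveling of optional chaining and `??`. A source-level equivalent of the added lookup line, assumed rather than taken from this diff, would be:

    // Assumed TypeScript source form of the bundled
    // `(_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex"`.
    declare const serverToolCalls: Record<string, string>;
    declare const part: { tool_use_id: string };

    const providerToolName =
      serverToolCalls[part.tool_use_id] ?? 'tool_search_tool_regex';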
@@ -3580,12 +3586,12 @@ var AnthropicMessagesLanguageModel = class {
 }
 case "message_start": {
 usage.input_tokens = value.message.usage.input_tokens;
-usage.cache_read_input_tokens = (
-usage.cache_creation_input_tokens = (
+usage.cache_read_input_tokens = (_d = value.message.usage.cache_read_input_tokens) != null ? _d : 0;
+usage.cache_creation_input_tokens = (_e = value.message.usage.cache_creation_input_tokens) != null ? _e : 0;
 rawUsage = {
 ...value.message.usage
 };
-cacheCreationInputTokens = (
+cacheCreationInputTokens = (_f = value.message.usage.cache_creation_input_tokens) != null ? _f : null;
 if (value.message.container != null) {
 container = {
 expiresAt: value.message.container.expires_at,
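In the message_start handler the cache-related usage counters now get explicit fallbacks: `?? 0` for the running totals and `?? null` for the provider-metadata value. A small self-contained sketch of the same defaulting, using a hand-rolled usage type rather than the SDK's:

    // Hand-rolled shape for illustration; Anthropic's usage object may carry
    // more fields than listed here.
    interface AnthropicUsage {
      input_tokens: number;
      cache_read_input_tokens?: number | null;
      cache_creation_input_tokens?: number | null;
    }

    function normalizeUsage(usage: AnthropicUsage) {
      return {
        inputTokens: usage.input_tokens,
        // Missing cache counters count as zero, mirroring the `?? 0` fallbacks above.
        cacheReadInputTokens: usage.cache_read_input_tokens ?? 0,
        cacheCreationInputTokens: usage.cache_creation_input_tokens ?? 0,
      };
    }

    // Example: a response without any cache activity normalizes to zeros.
    console.log(normalizeUsage({ input_tokens: 42 }));
    // -> { inputTokens: 42, cacheReadInputTokens: 0, cacheCreationInputTokens: 0 }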
@@ -3604,8 +3610,8 @@ var AnthropicMessagesLanguageModel = class {
 }
 controller.enqueue({
 type: "response-metadata",
-id: (
-modelId: (
+id: (_g = value.message.id) != null ? _g : void 0,
+modelId: (_h = value.message.model) != null ? _h : void 0
 });
 if (value.message.content != null) {
 for (let contentIndex = 0; contentIndex < value.message.content.length; contentIndex++) {
@@ -3621,7 +3627,7 @@ var AnthropicMessagesLanguageModel = class {
 id: part.id,
 toolName: part.name
 });
-const inputStr = JSON.stringify((
+const inputStr = JSON.stringify((_i = part.input) != null ? _i : {});
 controller.enqueue({
 type: "tool-input-delta",
 id: part.id,
@@ -3656,17 +3662,17 @@ var AnthropicMessagesLanguageModel = class {
 finishReason: value.delta.stop_reason,
 isJsonResponseFromTool
 }),
-raw: (
+raw: (_j = value.delta.stop_reason) != null ? _j : void 0
 };
-stopSequence = (
+stopSequence = (_k = value.delta.stop_sequence) != null ? _k : null;
 container = value.delta.container != null ? {
 expiresAt: value.delta.container.expires_at,
 id: value.delta.container.id,
-skills: (
+skills: (_m = (_l = value.delta.container.skills) == null ? void 0 : _l.map((skill) => ({
 type: skill.type,
 skillId: skill.skill_id,
 version: skill.version
-}))) != null ?
+}))) != null ? _m : null
 } : null;
 if (value.delta.context_management) {
 contextManagement = mapAnthropicResponseContextManagement(
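The practical effect of the serverToolCalls additions is that a tool-search tool-result part now carries the provider tool name recorded for its call id (regex or bm25, with tool_search_tool_regex as the fallback), so it maps to the same custom tool name as the originating tool-call part. A hedged sketch of pairing the two by toolCallId, using only the fields the hunks above put on those parts:

    // Part shapes reduced to the fields used here; the real parts carry more data.
    interface ToolCallPart {
      type: 'tool-call';
      toolCallId: string;
      toolName: string;
    }
    interface ToolResultPart {
      type: 'tool-result';
      toolCallId: string;
      toolName: string;
      isError?: boolean;
    }
    type ContentPart = ToolCallPart | ToolResultPart;

    // Group results with their originating calls; with this change both sides of a
    // tool-search pair report the same tool name.
    function pairToolSearch(content: ContentPart[]) {
      const calls = new Map<string, ToolCallPart>();
      const pairs: Array<{ call: ToolCallPart; result: ToolResultPart }> = [];
      for (const part of content) {
        if (part.type === 'tool-call') {
          calls.set(part.toolCallId, part);
        } else if (part.type === 'tool-result') {
          const call = calls.get(part.toolCallId);
          if (call != null) {
            pairs.push({ call, result: part });
          }
        }
      }
      return pairs;
    }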