@ai-sdk/anthropic 3.0.2 → 3.0.4

package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "3.0.2" : "0.0.0-test";
+ var VERSION = true ? "3.0.4" : "0.0.0-test";

  // src/anthropic-messages-language-model.ts
  import {
@@ -974,7 +974,8 @@ var webSearch_20250305InputSchema = lazySchema4(
  var factory2 = createProviderToolFactoryWithOutputSchema({
  id: "anthropic.web_search_20250305",
  inputSchema: webSearch_20250305InputSchema,
- outputSchema: webSearch_20250305OutputSchema
+ outputSchema: webSearch_20250305OutputSchema,
+ supportsDeferredResults: true
  });
  var webSearch_20250305 = (args = {}) => {
  return factory2(args);
@@ -1034,7 +1035,8 @@ var webFetch_20250910InputSchema = lazySchema5(
  var factory3 = createProviderToolFactoryWithOutputSchema2({
  id: "anthropic.web_fetch_20250910",
  inputSchema: webFetch_20250910InputSchema,
- outputSchema: webFetch_20250910OutputSchema
+ outputSchema: webFetch_20250910OutputSchema,
+ supportsDeferredResults: true
  });
  var webFetch_20250910 = (args = {}) => {
  return factory3(args);
@@ -2734,7 +2736,7 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
  const { args, warnings, betas, usesJsonResponseTool, toolNameMapping } = await this.getArgs({
  ...options,
  stream: false,
@@ -2758,6 +2760,7 @@ var AnthropicMessagesLanguageModel = class {
  });
  const content = [];
  const mcpToolCalls = {};
+ const serverToolCalls = {};
  let isJsonResponseFromTool = false;
  for (const part of response.content) {
  switch (part.type) {
@@ -2852,6 +2855,7 @@ var AnthropicMessagesLanguageModel = class {
  providerExecuted: true
  });
  } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+ serverToolCalls[part.id] = part.name;
  content.push({
  type: "tool-call",
  toolCallId: part.id,
@@ -3015,11 +3019,12 @@ var AnthropicMessagesLanguageModel = class {
  }
  // tool search tool results:
  case "tool_search_tool_result": {
+ const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
  if (part.content.type === "tool_search_tool_search_result") {
  content.push({
  type: "tool-result",
  toolCallId: part.tool_use_id,
- toolName: toolNameMapping.toCustomToolName("tool_search"),
+ toolName: toolNameMapping.toCustomToolName(providerToolName),
  result: part.content.tool_references.map((ref) => ({
  type: ref.type,
  toolName: ref.tool_name
@@ -3029,7 +3034,7 @@ var AnthropicMessagesLanguageModel = class {
  content.push({
  type: "tool-result",
  toolCallId: part.tool_use_id,
- toolName: toolNameMapping.toCustomToolName("tool_search"),
+ toolName: toolNameMapping.toCustomToolName(providerToolName),
  isError: true,
  result: {
  type: "tool_search_tool_result_error",
@@ -3048,13 +3053,13 @@ var AnthropicMessagesLanguageModel = class {
  finishReason: response.stop_reason,
  isJsonResponseFromTool
  }),
- raw: (_c = response.stop_reason) != null ? _c : void 0
+ raw: (_d = response.stop_reason) != null ? _d : void 0
  },
  usage: convertAnthropicMessagesUsage(response.usage),
  request: { body: args },
  response: {
- id: (_d = response.id) != null ? _d : void 0,
- modelId: (_e = response.model) != null ? _e : void 0,
+ id: (_e = response.id) != null ? _e : void 0,
+ modelId: (_f = response.model) != null ? _f : void 0,
  headers: responseHeaders,
  body: rawResponse
  },
@@ -3062,20 +3067,20 @@ var AnthropicMessagesLanguageModel = class {
  providerMetadata: {
  anthropic: {
  usage: response.usage,
- cacheCreationInputTokens: (_f = response.usage.cache_creation_input_tokens) != null ? _f : null,
- stopSequence: (_g = response.stop_sequence) != null ? _g : null,
+ cacheCreationInputTokens: (_g = response.usage.cache_creation_input_tokens) != null ? _g : null,
+ stopSequence: (_h = response.stop_sequence) != null ? _h : null,
  container: response.container ? {
  expiresAt: response.container.expires_at,
  id: response.container.id,
- skills: (_i = (_h = response.container.skills) == null ? void 0 : _h.map((skill) => ({
+ skills: (_j = (_i = response.container.skills) == null ? void 0 : _i.map((skill) => ({
  type: skill.type,
  skillId: skill.skill_id,
  version: skill.version
- }))) != null ? _i : null
+ }))) != null ? _j : null
  } : null,
- contextManagement: (_j = mapAnthropicResponseContextManagement(
+ contextManagement: (_k = mapAnthropicResponseContextManagement(
  response.context_management
- )) != null ? _j : null
+ )) != null ? _k : null
  }
  }
  };
@@ -3118,6 +3123,7 @@ var AnthropicMessagesLanguageModel = class {
  };
  const contentBlocks = {};
  const mcpToolCalls = {};
+ const serverToolCalls = {};
  let contextManagement = null;
  let rawUsage = void 0;
  let cacheCreationInputTokens = null;
@@ -3132,7 +3138,7 @@ var AnthropicMessagesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -3244,6 +3250,7 @@ var AnthropicMessagesLanguageModel = class {
  providerExecuted: true
  });
  } else if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
+ serverToolCalls[part.id] = part.name;
  const customToolName = toolNameMapping.toCustomToolName(
  part.name
  );
@@ -3388,11 +3395,12 @@ var AnthropicMessagesLanguageModel = class {
  }
  // tool search tool results:
  case "tool_search_tool_result": {
+ const providerToolName = (_c = serverToolCalls[part.tool_use_id]) != null ? _c : "tool_search_tool_regex";
  if (part.content.type === "tool_search_tool_search_result") {
  controller.enqueue({
  type: "tool-result",
  toolCallId: part.tool_use_id,
- toolName: toolNameMapping.toCustomToolName("tool_search"),
+ toolName: toolNameMapping.toCustomToolName(providerToolName),
  result: part.content.tool_references.map((ref) => ({
  type: ref.type,
  toolName: ref.tool_name
@@ -3402,7 +3410,7 @@ var AnthropicMessagesLanguageModel = class {
  controller.enqueue({
  type: "tool-result",
  toolCallId: part.tool_use_id,
- toolName: toolNameMapping.toCustomToolName("tool_search"),
+ toolName: toolNameMapping.toCustomToolName(providerToolName),
  isError: true,
  result: {
  type: "tool_search_tool_result_error",
@@ -3601,12 +3609,12 @@ var AnthropicMessagesLanguageModel = class {
  }
  case "message_start": {
  usage.input_tokens = value.message.usage.input_tokens;
- usage.cache_read_input_tokens = (_c = value.message.usage.cache_read_input_tokens) != null ? _c : 0;
- usage.cache_creation_input_tokens = (_d = value.message.usage.cache_creation_input_tokens) != null ? _d : 0;
+ usage.cache_read_input_tokens = (_d = value.message.usage.cache_read_input_tokens) != null ? _d : 0;
+ usage.cache_creation_input_tokens = (_e = value.message.usage.cache_creation_input_tokens) != null ? _e : 0;
  rawUsage = {
  ...value.message.usage
  };
- cacheCreationInputTokens = (_e = value.message.usage.cache_creation_input_tokens) != null ? _e : null;
+ cacheCreationInputTokens = (_f = value.message.usage.cache_creation_input_tokens) != null ? _f : null;
  if (value.message.container != null) {
  container = {
  expiresAt: value.message.container.expires_at,
@@ -3625,8 +3633,8 @@ var AnthropicMessagesLanguageModel = class {
  }
  controller.enqueue({
  type: "response-metadata",
- id: (_f = value.message.id) != null ? _f : void 0,
- modelId: (_g = value.message.model) != null ? _g : void 0
+ id: (_g = value.message.id) != null ? _g : void 0,
+ modelId: (_h = value.message.model) != null ? _h : void 0
  });
  if (value.message.content != null) {
  for (let contentIndex = 0; contentIndex < value.message.content.length; contentIndex++) {
@@ -3642,7 +3650,7 @@ var AnthropicMessagesLanguageModel = class {
  id: part.id,
  toolName: part.name
  });
- const inputStr = JSON.stringify((_h = part.input) != null ? _h : {});
+ const inputStr = JSON.stringify((_i = part.input) != null ? _i : {});
  controller.enqueue({
  type: "tool-input-delta",
  id: part.id,
@@ -3677,17 +3685,17 @@ var AnthropicMessagesLanguageModel = class {
  finishReason: value.delta.stop_reason,
  isJsonResponseFromTool
  }),
- raw: (_i = value.delta.stop_reason) != null ? _i : void 0
+ raw: (_j = value.delta.stop_reason) != null ? _j : void 0
  };
- stopSequence = (_j = value.delta.stop_sequence) != null ? _j : null;
+ stopSequence = (_k = value.delta.stop_sequence) != null ? _k : null;
  container = value.delta.container != null ? {
  expiresAt: value.delta.container.expires_at,
  id: value.delta.container.id,
- skills: (_l = (_k = value.delta.container.skills) == null ? void 0 : _k.map((skill) => ({
+ skills: (_m = (_l = value.delta.container.skills) == null ? void 0 : _l.map((skill) => ({
  type: skill.type,
  skillId: skill.skill_id,
  version: skill.version
- }))) != null ? _l : null
+ }))) != null ? _m : null
  } : null;
  if (value.delta.context_management) {
  contextManagement = mapAnthropicResponseContextManagement(
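The substantive changes in this diff are small: the webSearch_20250305 and webFetch_20250910 tool factories now set supportsDeferredResults: true, and both doGenerate and the streaming transform keep a serverToolCalls map from a server tool_use id to the tool-search variant that issued it (tool_search_tool_regex or tool_search_tool_bm25), so a later tool_search_tool_result block is reported under that variant's custom tool name instead of the hard-coded "tool_search". The TypeScript sketch below restates that lookup pattern in isolation; the helper names recordServerToolUse and resolveToolSearchName are illustrative and not part of the package.

// Illustrative sketch of the id -> tool-name tracking added in 3.0.4 (not the shipped code).
type ToolSearchVariant = "tool_search_tool_regex" | "tool_search_tool_bm25";

const serverToolCalls: Record<string, ToolSearchVariant> = {};

// Record the variant when a server_tool_use content block arrives.
function recordServerToolUse(part: { id: string; name: string }): void {
  if (part.name === "tool_search_tool_regex" || part.name === "tool_search_tool_bm25") {
    serverToolCalls[part.id] = part.name;
  }
}

// Resolve the tool name for a tool_search_tool_result block; falls back to the
// regex variant when the originating call was not seen, mirroring the diff's default.
function resolveToolSearchName(toolUseId: string): ToolSearchVariant {
  return serverToolCalls[toolUseId] ?? "tool_search_tool_regex";
}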