@ai-sdk/anthropic 2.0.74 → 2.0.76

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as published in their respective public registries.
@@ -2,7 +2,7 @@ import { LanguageModelV2, JSONSchema7, SharedV2ProviderMetadata, LanguageModelV2
2
2
  import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
3
3
  import { Resolvable, FetchFunction } from '@ai-sdk/provider-utils';
4
4
 
5
- type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-sonnet-4-6' | 'claude-opus-4-6' | (string & {});
5
+ type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-sonnet-4-6' | 'claude-opus-4-6' | 'claude-opus-4-7' | (string & {});
6
6
 
7
7
  type AnthropicMessagesConfig = {
8
8
  provider: string;
@@ -2,7 +2,7 @@ import { LanguageModelV2, JSONSchema7, SharedV2ProviderMetadata, LanguageModelV2
2
2
  import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
3
3
  import { Resolvable, FetchFunction } from '@ai-sdk/provider-utils';
4
4
 
5
- type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-sonnet-4-6' | 'claude-opus-4-6' | (string & {});
5
+ type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-sonnet-4-6' | 'claude-opus-4-6' | 'claude-opus-4-7' | (string & {});
6
6
 
7
7
  type AnthropicMessagesConfig = {
8
8
  provider: string;
@@ -642,7 +642,13 @@ var anthropicProviderOptions = import_v43.z.object({
642
642
  thinking: import_v43.z.discriminatedUnion("type", [
643
643
  import_v43.z.object({
644
644
  /** for Sonnet 4.6, Opus 4.6, and newer models */
645
- type: import_v43.z.literal("adaptive")
645
+ type: import_v43.z.literal("adaptive"),
646
+ /**
647
+ * Controls whether thinking content is included in the response.
648
+ * - `"omitted"`: Thinking blocks are present but text is empty (default for Opus 4.7+).
649
+ * - `"summarized"`: Thinking content is returned. Required to see reasoning output.
650
+ */
651
+ display: import_v43.z.enum(["omitted", "summarized"]).optional()
646
652
  }),
647
653
  import_v43.z.object({
648
654
  /** for models before Opus 4.6, except Sonnet 4.6 still supports it */
@@ -698,12 +704,33 @@ var anthropicProviderOptions = import_v43.z.object({
698
704
  /**
699
705
  * @default 'high'
700
706
  */
701
- effort: import_v43.z.enum(["low", "medium", "high", "max"]).optional(),
707
+ effort: import_v43.z.enum(["low", "medium", "high", "xhigh", "max"]).optional(),
708
+ /**
709
+ * Task budget for agentic turns. Informs the model of the total token budget
710
+ * available for the current task, allowing it to prioritize work and wind down
711
+ * gracefully as the budget is consumed.
712
+ *
713
+ * Advisory only — does not enforce a hard token limit.
714
+ */
715
+ taskBudget: import_v43.z.object({
716
+ type: import_v43.z.literal("tokens"),
717
+ total: import_v43.z.number().int().min(2e4),
718
+ remaining: import_v43.z.number().int().min(0).optional()
719
+ }).optional(),
702
720
  /**
703
721
  * Enable fast mode for faster inference (2.5x faster output token speeds).
704
722
  * Only supported with claude-opus-4-6.
705
723
  */
706
724
  speed: import_v43.z.enum(["fast", "standard"]).optional(),
725
+ /**
726
+ * Controls where model inference runs for this request.
727
+ *
728
+ * - `"global"`: Inference may run in any available geography (default).
729
+ * - `"us"`: Inference runs only in US-based infrastructure.
730
+ *
731
+ * See https://platform.claude.com/docs/en/build-with-claude/data-residency
732
+ */
733
+ inferenceGeo: import_v43.z.enum(["us", "global"]).optional(),
707
734
  /**
708
735
  * Context management configuration for automatic context window management.
709
736
  * Enables features like automatic compaction and clearing of tool uses/thinking blocks.
@@ -1965,7 +1992,7 @@ var AnthropicMessagesLanguageModel = class {
1965
1992
  toolChoice,
1966
1993
  providerOptions
1967
1994
  }) {
1968
- var _a, _b, _c, _d, _e, _f, _g;
1995
+ var _a, _b, _c, _d, _e, _f, _g, _h;
1969
1996
  const warnings = [];
1970
1997
  if (frequencyPenalty != null) {
1971
1998
  warnings.push({
@@ -2023,8 +2050,36 @@ var AnthropicMessagesLanguageModel = class {
2023
2050
  const {
2024
2051
  maxOutputTokens: maxOutputTokensForModel,
2025
2052
  supportsStructuredOutput,
2053
+ rejectsSamplingParameters,
2026
2054
  isKnownModel
2027
2055
  } = getModelCapabilities(this.modelId);
2056
+ if (rejectsSamplingParameters) {
2057
+ if (temperature != null) {
2058
+ warnings.push({
2059
+ type: "unsupported-setting",
2060
+ setting: "temperature",
2061
+ details: `temperature is not supported by ${this.modelId} and will be ignored`
2062
+ });
2063
+ temperature = void 0;
2064
+ }
2065
+ if (topK != null) {
2066
+ warnings.push({
2067
+ type: "unsupported-setting",
2068
+ setting: "topK",
2069
+ details: `topK is not supported by ${this.modelId} and will be ignored`
2070
+ });
2071
+ topK = void 0;
2072
+ }
2073
+ if (topP != null) {
2074
+ warnings.push({
2075
+ type: "unsupported-setting",
2076
+ setting: "topP",
2077
+ details: `topP is not supported by ${this.modelId} and will be ignored`
2078
+ });
2079
+ topP = void 0;
2080
+ }
2081
+ }
2082
+ const isAnthropicModel = isKnownModel || this.modelId.startsWith("claude-");
2028
2083
  const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "jsonTool";
2029
2084
  const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
2030
2085
  const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
@@ -2043,6 +2098,7 @@ var AnthropicMessagesLanguageModel = class {
2043
2098
  const thinkingType = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type;
2044
2099
  const isThinking = thinkingType === "enabled" || thinkingType === "adaptive";
2045
2100
  let thinkingBudget = thinkingType === "enabled" ? (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens : void 0;
2101
+ const thinkingDisplay = thinkingType === "adaptive" ? (_e = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _e.display : void 0;
2046
2102
  const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
2047
2103
  const baseArgs = {
2048
2104
  // model id:
@@ -2057,19 +2113,42 @@ var AnthropicMessagesLanguageModel = class {
2057
2113
  ...isThinking && {
2058
2114
  thinking: {
2059
2115
  type: thinkingType,
2060
- ...thinkingBudget != null && { budget_tokens: thinkingBudget }
2116
+ ...thinkingBudget != null && { budget_tokens: thinkingBudget },
2117
+ ...thinkingDisplay != null && { display: thinkingDisplay }
2061
2118
  }
2062
2119
  },
2063
- ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
2064
- output_config: { effort: anthropicOptions.effort }
2120
+ ...((anthropicOptions == null ? void 0 : anthropicOptions.effort) || (anthropicOptions == null ? void 0 : anthropicOptions.taskBudget) || useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null) && {
2121
+ output_config: {
2122
+ ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
2123
+ effort: anthropicOptions.effort
2124
+ },
2125
+ ...(anthropicOptions == null ? void 0 : anthropicOptions.taskBudget) && {
2126
+ task_budget: {
2127
+ type: anthropicOptions.taskBudget.type,
2128
+ total: anthropicOptions.taskBudget.total,
2129
+ ...anthropicOptions.taskBudget.remaining != null && {
2130
+ remaining: anthropicOptions.taskBudget.remaining
2131
+ }
2132
+ }
2133
+ },
2134
+ ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
2135
+ format: {
2136
+ type: "json_schema",
2137
+ schema: responseFormat.schema
2138
+ }
2139
+ }
2140
+ }
2065
2141
  },
2066
2142
  ...(anthropicOptions == null ? void 0 : anthropicOptions.speed) && {
2067
2143
  speed: anthropicOptions.speed
2068
2144
  },
2145
+ ...(anthropicOptions == null ? void 0 : anthropicOptions.inferenceGeo) && {
2146
+ inference_geo: anthropicOptions.inferenceGeo
2147
+ },
2069
2148
  ...(anthropicOptions == null ? void 0 : anthropicOptions.cacheControl) && {
2070
2149
  cache_control: anthropicOptions.cacheControl
2071
2150
  },
2072
- ...((_e = anthropicOptions == null ? void 0 : anthropicOptions.metadata) == null ? void 0 : _e.userId) != null && {
2151
+ ...((_f = anthropicOptions == null ? void 0 : anthropicOptions.metadata) == null ? void 0 : _f.userId) != null && {
2073
2152
  metadata: { user_id: anthropicOptions.metadata.userId }
2074
2153
  },
2075
2154
  // structured output:
@@ -2083,7 +2162,7 @@ var AnthropicMessagesLanguageModel = class {
2083
2162
  ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
2084
2163
  container: {
2085
2164
  id: anthropicOptions.container.id,
2086
- skills: (_f = anthropicOptions.container.skills) == null ? void 0 : _f.map((skill) => ({
2165
+ skills: (_g = anthropicOptions.container.skills) == null ? void 0 : _g.map((skill) => ({
2087
2166
  type: skill.type,
2088
2167
  skill_id: skill.skillId,
2089
2168
  version: skill.version
@@ -2203,6 +2282,9 @@ var AnthropicMessagesLanguageModel = class {
2203
2282
  if (anthropicOptions == null ? void 0 : anthropicOptions.effort) {
2204
2283
  betas.add("effort-2025-11-24");
2205
2284
  }
2285
+ if (anthropicOptions == null ? void 0 : anthropicOptions.taskBudget) {
2286
+ betas.add("task-budgets-2026-03-13");
2287
+ }
2206
2288
  if ((anthropicOptions == null ? void 0 : anthropicOptions.speed) === "fast") {
2207
2289
  betas.add("fast-mode-2026-02-01");
2208
2290
  }
@@ -2246,7 +2328,7 @@ var AnthropicMessagesLanguageModel = class {
2246
2328
  ...betas,
2247
2329
  ...toolsBetas,
2248
2330
  ...userSuppliedBetas,
2249
- ...(_g = anthropicOptions == null ? void 0 : anthropicOptions.anthropicBeta) != null ? _g : []
2331
+ ...(_h = anthropicOptions == null ? void 0 : anthropicOptions.anthropicBeta) != null ? _h : []
2250
2332
  ]),
2251
2333
  usesJsonResponseTool: jsonResponseTool != null
2252
2334
  };
@@ -3210,52 +3292,67 @@ var AnthropicMessagesLanguageModel = class {
3210
3292
  }
3211
3293
  };
3212
3294
  function getModelCapabilities(modelId) {
3213
- if (modelId.includes("claude-sonnet-4-6") || modelId.includes("claude-opus-4-6")) {
3295
+ if (modelId.includes("claude-opus-4-7")) {
3296
+ return {
3297
+ maxOutputTokens: 128e3,
3298
+ supportsStructuredOutput: true,
3299
+ rejectsSamplingParameters: true,
3300
+ isKnownModel: true
3301
+ };
3302
+ } else if (modelId.includes("claude-sonnet-4-6") || modelId.includes("claude-opus-4-6")) {
3214
3303
  return {
3215
3304
  maxOutputTokens: 128e3,
3216
3305
  supportsStructuredOutput: true,
3306
+ rejectsSamplingParameters: false,
3217
3307
  isKnownModel: true
3218
3308
  };
3219
3309
  } else if (modelId.includes("claude-sonnet-4-5") || modelId.includes("claude-opus-4-5") || modelId.includes("claude-haiku-4-5")) {
3220
3310
  return {
3221
3311
  maxOutputTokens: 64e3,
3222
3312
  supportsStructuredOutput: true,
3313
+ rejectsSamplingParameters: false,
3223
3314
  isKnownModel: true
3224
3315
  };
3225
3316
  } else if (modelId.includes("claude-opus-4-1")) {
3226
3317
  return {
3227
3318
  maxOutputTokens: 32e3,
3228
3319
  supportsStructuredOutput: true,
3320
+ rejectsSamplingParameters: false,
3229
3321
  isKnownModel: true
3230
3322
  };
3231
3323
  } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet")) {
3232
3324
  return {
3233
3325
  maxOutputTokens: 64e3,
3234
3326
  supportsStructuredOutput: false,
3327
+ rejectsSamplingParameters: false,
3235
3328
  isKnownModel: true
3236
3329
  };
3237
3330
  } else if (modelId.includes("claude-opus-4-")) {
3238
3331
  return {
3239
3332
  maxOutputTokens: 32e3,
3240
3333
  supportsStructuredOutput: false,
3334
+ rejectsSamplingParameters: false,
3241
3335
  isKnownModel: true
3242
3336
  };
3243
3337
  } else if (modelId.includes("claude-3-5-haiku")) {
3244
3338
  return {
3245
3339
  maxOutputTokens: 8192,
3246
3340
  supportsStructuredOutput: false,
3341
+ rejectsSamplingParameters: false,
3247
3342
  isKnownModel: true
3248
3343
  };
3249
3344
  } else if (modelId.includes("claude-3-haiku")) {
3250
3345
  return {
3251
3346
  maxOutputTokens: 4096,
3252
3347
  supportsStructuredOutput: false,
3348
+ rejectsSamplingParameters: false,
3253
3349
  isKnownModel: true
3254
3350
  };
3255
3351
  } else {
3256
3352
  return {
3257
3353
  maxOutputTokens: 4096,
3258
3354
  supportsStructuredOutput: false,
3355
+ rejectsSamplingParameters: false,
3259
3356
  isKnownModel: false
3260
3357
  };
3261
3358
  }