@ai-sdk/anthropic 3.0.0-beta.56 → 3.0.0-beta.58

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
@@ -572,6 +572,14 @@ var anthropicProviderOptions = z3.object({
  * This allows you to deactivate reasoning inputs for models that do not support them.
  */
  sendReasoning: z3.boolean().optional(),
+ /**
+ * Determines how structured outputs are generated.
+ *
+ * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+ * - `jsonTool`: Use a special 'json' tool to specify the structured output format.
+ * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool' (default).
+ */
+ structuredOutputMode: z3.enum(["outputFormat", "jsonTool", "auto"]).optional(),
  /**
  * Configuration for enabling Claude's extended thinking.
  *
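The new `structuredOutputMode` provider option chooses between the native `output_format` request parameter and the legacy 'json' tool. A minimal usage sketch, assuming the standard AI SDK `providerOptions` pass-through (model id, schema, and prompt are illustrative):

    import { anthropic } from '@ai-sdk/anthropic';
    import { generateObject } from 'ai';
    import { z } from 'zod';

    const { object } = await generateObject({
      model: anthropic('claude-sonnet-4-5'),
      schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
      prompt: 'Summarize the release notes.',
      providerOptions: {
        anthropic: {
          // 'auto' (the default) uses output_format when the model supports it
          // and falls back to the 'json' tool otherwise; 'outputFormat' forces it.
          structuredOutputMode: 'outputFormat',
        },
      },
    });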
@@ -633,7 +641,11 @@ var anthropicProviderOptions = z3.object({
  *
  * @default true
  */
- toolStreaming: z3.boolean().optional()
+ toolStreaming: z3.boolean().optional(),
+ /**
+ * @default 'high'
+ */
+ effort: z3.enum(["low", "medium", "high"]).optional()
  });

  // src/anthropic-prepare-tools.ts
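The companion `effort` option (default 'high' per the JSDoc above) is forwarded as `output_config.effort` later in this diff. A hedged sketch of how a caller might set it, again assuming the standard `providerOptions` pattern:

    import { anthropic } from '@ai-sdk/anthropic';
    import { generateText } from 'ai';

    const { text } = await generateText({
      model: anthropic('claude-opus-4-5'),
      prompt: 'Draft a short changelog entry.',
      providerOptions: {
        anthropic: {
          // Valid values per the schema above: 'low' | 'medium' | 'high'.
          effort: 'low',
        },
      },
    });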
@@ -1910,7 +1922,7 @@ var AnthropicMessagesLanguageModel = class {
  providerOptions,
  stream
  }) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -1954,27 +1966,33 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
- const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
- type: "function",
- name: "json",
- description: "Respond with a JSON object.",
- inputSchema: responseFormat.schema
- } : void 0;
  const anthropicOptions = await parseProviderOptions2({
  provider: "anthropic",
  providerOptions,
  schema: anthropicProviderOptions
  });
+ const {
+ maxOutputTokens: maxOutputTokensForModel,
+ supportsStructuredOutput,
+ isKnownModel
+ } = getModelCapabilities(this.modelId);
+ const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "auto";
+ const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+ const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+ type: "function",
+ name: "json",
+ description: "Respond with a JSON object.",
+ inputSchema: responseFormat.schema
+ } : void 0;
  const cacheControlValidator = new CacheControlValidator();
  const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
  prompt,
- sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
+ sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
  warnings,
  cacheControlValidator
  });
- const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
- const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
- const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+ const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+ const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
  const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
  const baseArgs = {
  // model id:
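The hunk above replaces the unconditional 'json' response tool with a mode check: the tool is only created when structured output is not used. A condensed TypeScript sketch of that decision, using the names from the diff (illustrative only):

    type StructuredOutputMode = 'outputFormat' | 'jsonTool' | 'auto';

    function resolveUseStructuredOutput(
      mode: StructuredOutputMode | undefined,
      supportsStructuredOutput: boolean,
    ): boolean {
      const resolved = mode ?? 'auto';
      // 'outputFormat' forces the native output_format parameter;
      // 'auto' uses it only when getModelCapabilities reports support;
      // 'jsonTool' (or 'auto' on an unsupported model) keeps the legacy json tool.
      return (
        resolved === 'outputFormat' ||
        (resolved === 'auto' && supportsStructuredOutput)
      );
    }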
@@ -1989,6 +2007,16 @@ var AnthropicMessagesLanguageModel = class {
  ...isThinking && {
  thinking: { type: "enabled", budget_tokens: thinkingBudget }
  },
+ ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
+ output_config: { effort: anthropicOptions.effort }
+ },
+ // structured output:
+ ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+ output_format: {
+ type: "json_schema",
+ schema: responseFormat.schema
+ }
+ },
  // mcp servers:
  ...(anthropicOptions == null ? void 0 : anthropicOptions.mcpServers) && anthropicOptions.mcpServers.length > 0 && {
  mcp_servers: anthropicOptions.mcpServers.map((server) => ({
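With `useStructuredOutput` true and a JSON response format, the request body gains an `output_format` block, plus an `output_config` block when `effort` is set. A rough sketch of the resulting payload shape; the `output_config` and `output_format` fields come from the diff, the surrounding values are placeholders:

    const exampleRequestBody = {
      model: 'claude-sonnet-4-5',
      max_tokens: 64000,
      messages: [{ role: 'user', content: 'Summarize the release notes.' }],
      output_config: { effort: 'high' },
      output_format: {
        type: 'json_schema',
        schema: {
          type: 'object',
          properties: { title: { type: 'string' } },
          required: ['title'],
        },
      },
    };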
@@ -2006,7 +2034,7 @@ var AnthropicMessagesLanguageModel = class {
  ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
  container: {
  id: anthropicOptions.container.id,
- skills: (_d = anthropicOptions.container.skills) == null ? void 0 : _d.map((skill) => ({
+ skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
  type: skill.type,
  skill_id: skill.skillId,
  version: skill.version
@@ -2049,7 +2077,7 @@ var AnthropicMessagesLanguageModel = class {
  }
  baseArgs.max_tokens = maxTokens + thinkingBudget;
  }
- if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
+ if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
  if (maxOutputTokens != null) {
  warnings.push({
  type: "unsupported-setting",
@@ -2075,9 +2103,15 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
- if (stream && ((_e = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _e : true)) {
+ if (anthropicOptions == null ? void 0 : anthropicOptions.effort) {
+ betas.add("effort-2025-11-24");
+ }
+ if (stream && ((_f = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _f : true)) {
  betas.add("fine-grained-tool-streaming-2025-05-14");
  }
+ if (useStructuredOutput) {
+ betas.add("structured-outputs-2025-11-13");
+ }
  const {
  tools: anthropicTools2,
  toolChoice: anthropicToolChoice,
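The new options also register the 'effort-2025-11-24' and 'structured-outputs-2025-11-13' beta flags alongside the existing tool-streaming beta. These flags are presumably joined into the `anthropic-beta` request header; a hedged sketch of that mapping (the helper name is hypothetical, not part of the package):

    // Hypothetical helper: turns the collected beta Set into a header value.
    function toAnthropicBetaHeader(betas: Set<string>): Record<string, string> {
      return betas.size > 0
        ? { 'anthropic-beta': Array.from(betas).join(',') }
        : {};
    }

    // e.g. { 'anthropic-beta': 'structured-outputs-2025-11-13,effort-2025-11-24' }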
@@ -2954,17 +2988,49 @@ var AnthropicMessagesLanguageModel = class {
  };
  }
  };
- function getMaxOutputTokensForModel(modelId) {
- if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
- return { maxOutputTokens: 64e3, knownModel: true };
+ function getModelCapabilities(modelId) {
+ if (modelId.includes("claude-sonnet-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-opus-4-1") || modelId.includes("claude-opus-4-5")) {
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-opus-4-")) {
- return { maxOutputTokens: 32e3, knownModel: true };
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-5-haiku")) {
- return { maxOutputTokens: 8192, knownModel: true };
+ return {
+ maxOutputTokens: 8192,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-haiku")) {
- return { maxOutputTokens: 4096, knownModel: true };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else {
- return { maxOutputTokens: 4096, knownModel: false };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: false
+ };
  }
  }
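The renamed `getModelCapabilities` helper extends the old max-output-token lookup with a `supportsStructuredOutput` flag; only the claude-sonnet-4-5, claude-opus-4-1, and claude-opus-4-5 branches report support. Worked examples, read straight off the branches above (model ids illustrative; branch order matters since matching is by substring):

    getModelCapabilities('claude-sonnet-4-5');
    // => { maxOutputTokens: 64000, supportsStructuredOutput: true, isKnownModel: true }

    getModelCapabilities('claude-3-7-sonnet-20250219');
    // => { maxOutputTokens: 64000, supportsStructuredOutput: false, isKnownModel: true }

    getModelCapabilities('some-unknown-model');
    // => { maxOutputTokens: 4096, supportsStructuredOutput: false, isKnownModel: false }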