@ai-sdk/anthropic 3.0.0-beta.56 → 3.0.0-beta.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.56" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.57" : "0.0.0-test";

  // src/anthropic-messages-language-model.ts
  import {
@@ -587,6 +587,14 @@ var anthropicProviderOptions = z3.object({
  * This allows you to deactivate reasoning inputs for models that do not support them.
  */
  sendReasoning: z3.boolean().optional(),
+ /**
+ * Determines how structured outputs are generated.
+ *
+ * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+ * - `jsonTool`: Use a special 'json' tool to specify the structured output format.
+ * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool' (default).
+ */
+ structuredOutputMode: z3.enum(["outputFormat", "jsonTool", "auto"]).optional(),
  /**
  * Configuration for enabling Claude's extended thinking.
  *
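
Note (not part of the diff): a minimal sketch of how the new provider option might be set from application code, assuming the usual AI SDK `generateObject` call; the model id and schema are placeholders, and `"auto"` is the default when the option is omitted.

    import { generateObject } from "ai";
    import { anthropic } from "@ai-sdk/anthropic";
    import { z } from "zod";

    // structuredOutputMode controls whether the provider uses the native
    // output_format parameter or the synthetic 'json' tool fallback.
    const { object } = await generateObject({
      model: anthropic("claude-sonnet-4-5"), // placeholder model id
      schema: z.object({ city: z.string(), population: z.number() }),
      prompt: "Describe a city.",
      providerOptions: {
        anthropic: { structuredOutputMode: "auto" }, // or "outputFormat" | "jsonTool"
      },
    });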
@@ -1925,7 +1933,7 @@ var AnthropicMessagesLanguageModel = class {
  providerOptions,
  stream
  }) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -1969,27 +1977,33 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
- const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
- type: "function",
- name: "json",
- description: "Respond with a JSON object.",
- inputSchema: responseFormat.schema
- } : void 0;
  const anthropicOptions = await parseProviderOptions2({
  provider: "anthropic",
  providerOptions,
  schema: anthropicProviderOptions
  });
+ const {
+ maxOutputTokens: maxOutputTokensForModel,
+ supportsStructuredOutput,
+ isKnownModel
+ } = getModelCapabilities(this.modelId);
+ const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "auto";
+ const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+ const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+ type: "function",
+ name: "json",
+ description: "Respond with a JSON object.",
+ inputSchema: responseFormat.schema
+ } : void 0;
  const cacheControlValidator = new CacheControlValidator();
  const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
  prompt,
- sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
+ sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
  warnings,
  cacheControlValidator
  });
- const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
- const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
- const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+ const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+ const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
  const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
  const baseArgs = {
  // model id:
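
Note (not part of the diff): the minified decision above reads more clearly when spelled out; `"outputFormat"` forces the native parameter, `"jsonTool"` forces the tool fallback, and `"auto"` takes the native path only when the model's capabilities report support.

    // Readable restatement of the decision in the hunk above.
    const structuredOutputMode = anthropicOptions?.structuredOutputMode ?? "auto";
    const useStructuredOutput =
      structuredOutputMode === "outputFormat" ||
      (structuredOutputMode === "auto" && supportsStructuredOutput);
    // The synthetic 'json' tool is only created when the native path is not used.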
@@ -2004,6 +2018,13 @@ var AnthropicMessagesLanguageModel = class {
  ...isThinking && {
  thinking: { type: "enabled", budget_tokens: thinkingBudget }
  },
+ // structured output:
+ ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+ output_format: {
+ type: "json_schema",
+ schema: responseFormat.schema
+ }
+ },
  // mcp servers:
  ...(anthropicOptions == null ? void 0 : anthropicOptions.mcpServers) && anthropicOptions.mcpServers.length > 0 && {
  mcp_servers: anthropicOptions.mcpServers.map((server) => ({
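
Note (not part of the diff): when the native path is taken, the request body gains an `output_format` block and no `json` tool is added; a sketch of the relevant fragment with placeholder values.

    // Placeholder model id, token limit, and schema.
    const bodyFragment = {
      model: "claude-sonnet-4-5",
      max_tokens: 64000,
      output_format: {
        type: "json_schema",
        schema: { type: "object", properties: { city: { type: "string" } } },
      },
    };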
@@ -2021,7 +2042,7 @@ var AnthropicMessagesLanguageModel = class {
  ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
  container: {
  id: anthropicOptions.container.id,
- skills: (_d = anthropicOptions.container.skills) == null ? void 0 : _d.map((skill) => ({
+ skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
  type: skill.type,
  skill_id: skill.skillId,
  version: skill.version
@@ -2064,7 +2085,7 @@ var AnthropicMessagesLanguageModel = class {
  }
  baseArgs.max_tokens = maxTokens + thinkingBudget;
  }
- if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
+ if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
  if (maxOutputTokens != null) {
  warnings.push({
  type: "unsupported-setting",
@@ -2090,9 +2111,12 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
- if (stream && ((_e = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _e : true)) {
+ if (stream && ((_f = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _f : true)) {
  betas.add("fine-grained-tool-streaming-2025-05-14");
  }
+ if (useStructuredOutput) {
+ betas.add("structured-outputs-2025-11-13");
+ }
  const {
  tools: anthropicTools2,
  toolChoice: anthropicToolChoice,
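
Note (not part of the diff): the `betas` set is presumed to be sent in the `anthropic-beta` request header, as it is for the existing tool-streaming beta, so a structured-output request would also advertise the new beta; the exact join and ordering below are assumptions.

    // Assumed header shape; the provider handles joining the beta flags.
    const headers = {
      "anthropic-beta":
        "structured-outputs-2025-11-13,fine-grained-tool-streaming-2025-05-14",
    };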
@@ -2969,17 +2993,49 @@ var AnthropicMessagesLanguageModel = class {
  };
  }
  };
- function getMaxOutputTokensForModel(modelId) {
- if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
- return { maxOutputTokens: 64e3, knownModel: true };
+ function getModelCapabilities(modelId) {
+ if (modelId.includes("claude-sonnet-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-opus-4-1")) {
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-opus-4-")) {
- return { maxOutputTokens: 32e3, knownModel: true };
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-5-haiku")) {
- return { maxOutputTokens: 8192, knownModel: true };
+ return {
+ maxOutputTokens: 8192,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-haiku")) {
- return { maxOutputTokens: 4096, knownModel: true };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else {
- return { maxOutputTokens: 4096, knownModel: false };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: false
+ };
  }
  }
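
Note (not part of the diff): based on the branches above, the renamed helper resolves model capabilities roughly as follows; the model ids are chosen only to hit each branch.

    getModelCapabilities("claude-sonnet-4-5"); // { maxOutputTokens: 64000, supportsStructuredOutput: true,  isKnownModel: true }
    getModelCapabilities("claude-opus-4-1");   // { maxOutputTokens: 32000, supportsStructuredOutput: true,  isKnownModel: true }
    getModelCapabilities("claude-3-5-haiku");  // { maxOutputTokens: 8192,  supportsStructuredOutput: false, isKnownModel: true }
    getModelCapabilities("some-other-model");  // { maxOutputTokens: 4096,  supportsStructuredOutput: false, isKnownModel: false }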