@ai-sdk/anthropic 2.0.45 → 2.0.48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -531,6 +531,20 @@ var anthropicFilePartProviderOptions = z3.object({
 });
 var anthropicProviderOptions = z3.object({
   sendReasoning: z3.boolean().optional(),
+  /**
+   * Determines how structured outputs are generated.
+   *
+   * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+   * - `jsonTool`: Use a special 'json' tool to specify the structured output format (default).
+   * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool'.
+   */
+  structuredOutputMode: z3.enum(["outputFormat", "jsonTool", "auto"]).optional(),
+  /**
+   * Configuration for enabling Claude's extended thinking.
+   *
+   * When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer.
+   * Requires a minimum budget of 1,024 tokens and counts towards the `max_tokens` limit.
+   */
   thinking: z3.object({
     type: z3.union([z3.literal("enabled"), z3.literal("disabled")]),
     budgetTokens: z3.number().optional()
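
The new `structuredOutputMode` option (and the `thinking` option it now sits beside) is passed through the AI SDK's `providerOptions.anthropic` channel rather than as a top-level setting; `thinking` is configured through the same object, e.g. `thinking: { type: 'enabled', budgetTokens: 2048 }`. A minimal usage sketch, assuming the `ai` and `@ai-sdk/anthropic` packages and an illustrative model id:

    import { generateObject } from 'ai';
    import { anthropic } from '@ai-sdk/anthropic';
    import { z } from 'zod';

    // Sketch: request schema-constrained output via the native output_format path
    // instead of the default 'json' tool. The model id is only an example.
    const { object } = await generateObject({
      model: anthropic('claude-opus-4-5'),
      schema: z.object({ city: z.string(), population: z.number() }),
      prompt: 'Describe the largest city in Japan.',
      providerOptions: {
        anthropic: {
          structuredOutputMode: 'outputFormat', // 'jsonTool' (default) | 'outputFormat' | 'auto'
        },
      },
    });
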
@@ -562,7 +576,11 @@ var anthropicProviderOptions = z3.object({
         version: z3.string().optional()
       })
     ).optional()
-  }).optional()
+  }).optional(),
+  /**
+   * @default 'high'
+   */
+  effort: z3.enum(["low", "medium", "high"]).optional()
 });
 
 // src/anthropic-prepare-tools.ts
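
`effort` travels through the same provider options object; per the schema comment it defaults to `'high'`. A hedged sketch (model id illustrative), with the option forwarded as `output_config.effort` further down in this diff:

    import { generateText } from 'ai';
    import { anthropic } from '@ai-sdk/anthropic';

    // Sketch: effort is validated here and later forwarded as output_config.effort,
    // which also enables the effort-2025-11-24 beta (see the later hunks).
    const { text } = await generateText({
      model: anthropic('claude-opus-4-5'),
      prompt: 'Summarize the theory of relativity in two sentences.',
      providerOptions: {
        anthropic: { effort: 'low' }, // 'low' | 'medium' | 'high' (default 'high')
      },
    });
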
@@ -1768,7 +1786,7 @@ var AnthropicMessagesLanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a, _b, _c, _d;
+    var _a, _b, _c, _d, _e;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -1818,27 +1836,33 @@ var AnthropicMessagesLanguageModel = class {
        });
      }
    }
-    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
-      type: "function",
-      name: "json",
-      description: "Respond with a JSON object.",
-      inputSchema: responseFormat.schema
-    } : void 0;
     const anthropicOptions = await parseProviderOptions2({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptions
     });
+    const {
+      maxOutputTokens: maxOutputTokensForModel,
+      supportsStructuredOutput,
+      isKnownModel
+    } = getModelCapabilities(this.modelId);
+    const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "jsonTool";
+    const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+      type: "function",
+      name: "json",
+      description: "Respond with a JSON object.",
+      inputSchema: responseFormat.schema
+    } : void 0;
     const cacheControlValidator = new CacheControlValidator();
     const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
       prompt,
-      sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
+      sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
       warnings,
       cacheControlValidator
     });
-    const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
-    const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
-    const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+    const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+    const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
     const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
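
The mode resolution above is compact; restated as a standalone sketch (names mirror the bundled code, not a public API):

    type StructuredOutputMode = 'outputFormat' | 'jsonTool' | 'auto';

    // Paraphrase of the selection logic in getArgs:
    // - the default mode is 'jsonTool' (the pre-2.0.48 behavior)
    // - 'auto' only opts into output_format when the model is known to support it
    // - the legacy 'json' tool is created only when output_format is NOT used
    function resolveStructuredOutput(
      mode: StructuredOutputMode | undefined,
      supportsStructuredOutput: boolean,
    ): boolean {
      const effectiveMode = mode ?? 'jsonTool';
      return (
        effectiveMode === 'outputFormat' ||
        (effectiveMode === 'auto' && supportsStructuredOutput)
      );
    }
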
@@ -1853,11 +1877,21 @@ var AnthropicMessagesLanguageModel = class {
       ...isThinking && {
         thinking: { type: "enabled", budget_tokens: thinkingBudget }
       },
+      ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
+        output_config: { effort: anthropicOptions.effort }
+      },
+      // structured output:
+      ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+        output_format: {
+          type: "json_schema",
+          schema: responseFormat.schema
+        }
+      },
       // container with agent skills:
       ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
         container: {
           id: anthropicOptions.container.id,
-          skills: (_d = anthropicOptions.container.skills) == null ? void 0 : _d.map((skill) => ({
+          skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
             type: skill.type,
             skill_id: skill.skillId,
             version: skill.version
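
On the wire, these spreads add two top-level fields next to `thinking` in the Messages request. An illustrative shape of the extra fields for a JSON response format with `effort` set (values are examples, not defaults):

    // Illustrative shape of the fields merged into baseArgs; not a complete
    // Anthropic Messages API request.
    const requestExtras = {
      output_config: { effort: 'low' },
      output_format: {
        type: 'json_schema',
        schema: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    };
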
@@ -1900,7 +1934,7 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
+    if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
       if (maxOutputTokens != null) {
         warnings.push({
           type: "unsupported-setting",
@@ -1923,6 +1957,12 @@ var AnthropicMessagesLanguageModel = class {
        });
      }
    }
+    if (anthropicOptions == null ? void 0 : anthropicOptions.effort) {
+      betas.add("effort-2025-11-24");
+    }
+    if (useStructuredOutput) {
+      betas.add("structured-outputs-2025-11-13");
+    }
     const {
       tools: anthropicTools2,
       toolChoice: anthropicToolChoice,
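
Both features are gated behind beta flags collected in the `betas` set. The flag names come straight from the diff; how the set reaches the request is an assumption here, but Anthropic betas are normally sent as a comma-separated `anthropic-beta` header:

    const betas = new Set<string>();
    betas.add('effort-2025-11-24');             // when providerOptions.anthropic.effort is set
    betas.add('structured-outputs-2025-11-13'); // when output_format is used

    // Assumed mechanics: the provider joins the set into a single header value.
    const headers = { 'anthropic-beta': [...betas].join(',') };
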
@@ -2725,17 +2765,49 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
 };
-function getMaxOutputTokensForModel(modelId) {
-  if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
-    return { maxOutputTokens: 64e3, knownModel: true };
+function getModelCapabilities(modelId) {
+  if (modelId.includes("claude-sonnet-4-5") || modelId.includes("claude-opus-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-opus-4-1")) {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-opus-4-")) {
-    return { maxOutputTokens: 32e3, knownModel: true };
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-5-haiku")) {
-    return { maxOutputTokens: 8192, knownModel: true };
+    return {
+      maxOutputTokens: 8192,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-haiku")) {
-    return { maxOutputTokens: 4096, knownModel: true };
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else {
-    return { maxOutputTokens: 4096, knownModel: false };
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: false
+    };
   }
 }
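
The expanded branches of `getModelCapabilities` read more easily as a table. The data below restates the diff (substring matches, evaluated top to bottom; the function itself is internal to the bundle, not exported):

    // Capability table restated from the branches above. The last entry is the
    // fallback for unknown model ids.
    type Capabilities = {
      maxOutputTokens: number;
      supportsStructuredOutput: boolean;
      isKnownModel: boolean;
    };

    const capabilityTable: Array<{ match: string[]; caps: Capabilities }> = [
      { match: ['claude-sonnet-4-5', 'claude-opus-4-5'],
        caps: { maxOutputTokens: 64000, supportsStructuredOutput: true, isKnownModel: true } },
      { match: ['claude-opus-4-1'],
        caps: { maxOutputTokens: 32000, supportsStructuredOutput: true, isKnownModel: true } },
      { match: ['claude-sonnet-4-', 'claude-3-7-sonnet', 'claude-haiku-4-5'],
        caps: { maxOutputTokens: 64000, supportsStructuredOutput: false, isKnownModel: true } },
      { match: ['claude-opus-4-'],
        caps: { maxOutputTokens: 32000, supportsStructuredOutput: false, isKnownModel: true } },
      { match: ['claude-3-5-haiku'],
        caps: { maxOutputTokens: 8192, supportsStructuredOutput: false, isKnownModel: true } },
      { match: ['claude-3-haiku'],
        caps: { maxOutputTokens: 4096, supportsStructuredOutput: false, isKnownModel: true } },
      { match: [], // fallback: any other model id
        caps: { maxOutputTokens: 4096, supportsStructuredOutput: false, isKnownModel: false } },
    ];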