@ai-sdk/anthropic 2.0.45 → 2.0.48

package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "2.0.45" : "0.0.0-test";
+ var VERSION = true ? "2.0.48" : "0.0.0-test";

  // src/anthropic-messages-language-model.ts
  import {
@@ -546,6 +546,20 @@ var anthropicFilePartProviderOptions = z3.object({
  });
  var anthropicProviderOptions = z3.object({
  sendReasoning: z3.boolean().optional(),
+ /**
+ * Determines how structured outputs are generated.
+ *
+ * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+ * - `jsonTool`: Use a special 'json' tool to specify the structured output format (default).
+ * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool'.
+ */
+ structuredOutputMode: z3.enum(["outputFormat", "jsonTool", "auto"]).optional(),
+ /**
+ * Configuration for enabling Claude's extended thinking.
+ *
+ * When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer.
+ * Requires a minimum budget of 1,024 tokens and counts towards the `max_tokens` limit.
+ */
  thinking: z3.object({
  type: z3.union([z3.literal("enabled"), z3.literal("disabled")]),
  budgetTokens: z3.number().optional()
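
The new `structuredOutputMode` option is passed through the AI SDK's provider options. A minimal usage sketch (assuming the AI SDK v5 `generateObject` API; the model id, schema, and prompt are illustrative):

import { anthropic } from "@ai-sdk/anthropic";
import { generateObject } from "ai";
import { z } from "zod";

const { object } = await generateObject({
  model: anthropic("claude-opus-4-5"),
  schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
  prompt: "Suggest a title and tags for a post about structured outputs.",
  providerOptions: {
    anthropic: {
      // "auto" uses output_format where the model supports it and otherwise
      // falls back to the "json" tool; "jsonTool" remains the default.
      structuredOutputMode: "auto",
    },
  },
});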
@@ -577,7 +591,11 @@ var anthropicProviderOptions = z3.object({
  version: z3.string().optional()
  })
  ).optional()
- }).optional()
+ }).optional(),
+ /**
+ * @default 'high'
+ */
+ effort: z3.enum(["low", "medium", "high"]).optional()
  });

  // src/anthropic-prepare-tools.ts
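
The new top-level `effort` option feeds the request's `output_config.effort` field (added further down in this diff) and defaults to 'high' per the doc comment. A usage sketch under the same assumptions as above:

import { anthropic } from "@ai-sdk/anthropic";
import { generateText } from "ai";

const { text } = await generateText({
  model: anthropic("claude-opus-4-5"),
  prompt: "Summarize the trade-offs of streaming JSON output.",
  providerOptions: {
    // One of "low" | "medium" | "high" per the schema above; defaults to "high".
    anthropic: { effort: "low" },
  },
});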
@@ -1783,7 +1801,7 @@ var AnthropicMessagesLanguageModel = class {
  toolChoice,
  providerOptions
  }) {
- var _a, _b, _c, _d;
+ var _a, _b, _c, _d, _e;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -1833,27 +1851,33 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
- const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
- type: "function",
- name: "json",
- description: "Respond with a JSON object.",
- inputSchema: responseFormat.schema
- } : void 0;
  const anthropicOptions = await parseProviderOptions2({
  provider: "anthropic",
  providerOptions,
  schema: anthropicProviderOptions
  });
+ const {
+ maxOutputTokens: maxOutputTokensForModel,
+ supportsStructuredOutput,
+ isKnownModel
+ } = getModelCapabilities(this.modelId);
+ const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "jsonTool";
+ const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+ const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+ type: "function",
+ name: "json",
+ description: "Respond with a JSON object.",
+ inputSchema: responseFormat.schema
+ } : void 0;
  const cacheControlValidator = new CacheControlValidator();
  const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
  prompt,
- sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
+ sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
  warnings,
  cacheControlValidator
  });
- const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
- const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
- const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+ const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+ const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
  const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
  const baseArgs = {
  // model id:
@@ -1868,11 +1892,21 @@ var AnthropicMessagesLanguageModel = class {
  ...isThinking && {
  thinking: { type: "enabled", budget_tokens: thinkingBudget }
  },
+ ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
+ output_config: { effort: anthropicOptions.effort }
+ },
+ // structured output:
+ ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+ output_format: {
+ type: "json_schema",
+ schema: responseFormat.schema
+ }
+ },
  // container with agent skills:
  ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
  container: {
  id: anthropicOptions.container.id,
- skills: (_d = anthropicOptions.container.skills) == null ? void 0 : _d.map((skill) => ({
+ skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
  type: skill.type,
  skill_id: skill.skillId,
  version: skill.version
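
Taken together, the two new spreads mean that a JSON response format on the structured-output path produces a Messages API request body roughly like this sketch (values are illustrative; the schema is whatever `responseFormat.schema` contains):

const exampleRequestBody = {
  model: "claude-opus-4-5",
  max_tokens: 64000,
  output_config: { effort: "high" },
  output_format: {
    type: "json_schema",
    schema: {
      type: "object",
      properties: { title: { type: "string" } },
      required: ["title"],
    },
  },
  messages: [{ role: "user", content: "..." }],
};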
@@ -1915,7 +1949,7 @@ var AnthropicMessagesLanguageModel = class {
  }
  baseArgs.max_tokens = maxTokens + thinkingBudget;
  }
- if (knownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
+ if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
  if (maxOutputTokens != null) {
  warnings.push({
  type: "unsupported-setting",
@@ -1938,6 +1972,12 @@ var AnthropicMessagesLanguageModel = class {
  });
  }
  }
+ if (anthropicOptions == null ? void 0 : anthropicOptions.effort) {
+ betas.add("effort-2025-11-24");
+ }
+ if (useStructuredOutput) {
+ betas.add("structured-outputs-2025-11-13");
+ }
  const {
  tools: anthropicTools2,
  toolChoice: anthropicToolChoice,
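
Each new feature also registers a beta flag on the request's beta set. How the set is transmitted is not shown in this diff, but with the Anthropic Messages API such flags are typically sent via the anthropic-beta request header, along these lines:

// Illustrative only: how the accumulated beta flags would usually surface on the wire.
const headers = {
  "anthropic-beta": ["structured-outputs-2025-11-13", "effort-2025-11-24"].join(","),
};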
@@ -2740,17 +2780,49 @@ var AnthropicMessagesLanguageModel = class {
  };
  }
  };
- function getMaxOutputTokensForModel(modelId) {
- if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
- return { maxOutputTokens: 64e3, knownModel: true };
+ function getModelCapabilities(modelId) {
+ if (modelId.includes("claude-sonnet-4-5") || modelId.includes("claude-opus-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-opus-4-1")) {
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: true,
+ isKnownModel: true
+ };
+ } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+ return {
+ maxOutputTokens: 64e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-opus-4-")) {
- return { maxOutputTokens: 32e3, knownModel: true };
+ return {
+ maxOutputTokens: 32e3,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-5-haiku")) {
- return { maxOutputTokens: 8192, knownModel: true };
+ return {
+ maxOutputTokens: 8192,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else if (modelId.includes("claude-3-haiku")) {
- return { maxOutputTokens: 4096, knownModel: true };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: true
+ };
  } else {
- return { maxOutputTokens: 4096, knownModel: false };
+ return {
+ maxOutputTokens: 4096,
+ supportsStructuredOutput: false,
+ isKnownModel: false
+ };
  }
  }
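
Per the capability table above, under `structuredOutputMode: "auto"` only model ids matching `claude-sonnet-4-5`, `claude-opus-4-5`, or `claude-opus-4-1` take the native `output_format` path; every other id, including unknown models, keeps the legacy "json" tool. A condensed, self-contained sketch of that resolution (a hypothetical helper mirroring the logic in this diff, not an export of the package):

function usesNativeStructuredOutput(
  modelId: string,
  mode: "outputFormat" | "jsonTool" | "auto" = "jsonTool",
): boolean {
  // Mirrors getModelCapabilities().supportsStructuredOutput above.
  const supportsStructuredOutput =
    modelId.includes("claude-sonnet-4-5") ||
    modelId.includes("claude-opus-4-5") ||
    modelId.includes("claude-opus-4-1");
  // Mirrors the useStructuredOutput computation earlier in this diff.
  return mode === "outputFormat" || (mode === "auto" && supportsStructuredOutput);
}

usesNativeStructuredOutput("claude-opus-4-5", "auto"); // true
usesNativeStructuredOutput("claude-3-5-haiku-latest", "auto"); // false -> "json" tool fallback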