@ai-sdk/anthropic 2.0.30 → 2.0.32

@@ -522,6 +522,21 @@ var anthropicProviderOptions = z3.object({
   cacheControl: z3.object({
     type: z3.literal("ephemeral"),
     ttl: z3.union([z3.literal("5m"), z3.literal("1h")]).optional()
+  }).optional(),
+  /**
+   * Agent Skills configuration. Skills enable Claude to perform specialized tasks
+   * like document processing (PPTX, DOCX, PDF, XLSX) and data analysis.
+   * Requires code execution tool to be enabled.
+   */
+  container: z3.object({
+    id: z3.string().optional(),
+    skills: z3.array(
+      z3.object({
+        type: z3.union([z3.literal("anthropic"), z3.literal("custom")]),
+        skillId: z3.string(),
+        version: z3.string().optional()
+      })
+    ).optional()
   }).optional()
 });
 
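The new container option rides alongside the existing provider options and is validated by the schema above. From the caller's side it is passed under providerOptions.anthropic; a minimal sketch of an options object this schema accepts, with placeholder skill ids (not values taken from this diff):

const providerOptions = {
  anthropic: {
    container: {
      // id: "container_abc123",  // optional: reuse an existing container (placeholder id)
      skills: [
        { type: "anthropic", skillId: "pptx" },                // Anthropic-provided skill, placeholder id
        { type: "custom", skillId: "my-skill", version: "1" }, // custom skill, placeholder id/version
      ],
    },
  },
};

As the schema comment notes, skills only take effect when the code execution tool is enabled; the corresponding warning is added further down in this diff.
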
@@ -1639,8 +1654,7 @@ var AnthropicMessagesLanguageModel = class {
   }
   async getArgs({
     prompt,
-    maxOutputTokens = 4096,
-    // 4096: max model output tokens TODO update default in v5
+    maxOutputTokens,
     temperature,
     topP,
     topK,
@@ -1653,7 +1667,7 @@ var AnthropicMessagesLanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a, _b, _c;
+    var _a, _b, _c, _d;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -1699,18 +1713,20 @@ var AnthropicMessagesLanguageModel = class {
       providerOptions,
       schema: anthropicProviderOptions
     });
-    const { prompt: messagesPrompt, betas: messagesBetas } = await convertToAnthropicMessagesPrompt({
+    const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
       prompt,
       sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
       warnings
     });
     const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
     const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
+    const maxOutputTokensForModel = getMaxOutputTokensForModel(this.modelId);
+    const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
       model: this.modelId,
       // standardized settings:
-      max_tokens: maxOutputTokens,
+      max_tokens: maxTokens,
       temperature,
       top_k: topK,
       top_p: topP,
@@ -1719,6 +1735,17 @@ var AnthropicMessagesLanguageModel = class {
       ...isThinking && {
         thinking: { type: "enabled", budget_tokens: thinkingBudget }
       },
+      // container with agent skills:
+      ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
+        container: {
+          id: anthropicOptions.container.id,
+          skills: (_d = anthropicOptions.container.skills) == null ? void 0 : _d.map((skill) => ({
+            type: skill.type,
+            skill_id: skill.skillId,
+            version: skill.version
+          }))
+        }
+      },
       // prompt:
       system: messagesPrompt.system,
       messages: messagesPrompt.messages
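
The spread above only attaches a container field to the request when the option is present, and renames the camelCase skillId to the Messages API field skill_id. The options sketched earlier would therefore serialize to roughly this request-body fragment (same placeholder values):

const containerBodyFragment = {
  container: {
    skills: [
      { type: "anthropic", skill_id: "pptx" },
      { type: "custom", skill_id: "my-skill", version: "1" },
    ],
  },
};
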
@@ -1753,7 +1780,30 @@ var AnthropicMessagesLanguageModel = class {
           details: "topP is not supported when thinking is enabled"
         });
       }
-      baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
+      baseArgs.max_tokens = maxTokens + thinkingBudget;
+    }
+    if (baseArgs.max_tokens > maxOutputTokensForModel) {
+      if (maxOutputTokens != null) {
+        warnings.push({
+          type: "unsupported-setting",
+          setting: "maxOutputTokens",
+          details: `${baseArgs.max_tokens} (maxOutputTokens + thinkingBudget) is greater than ${this.modelId} ${maxOutputTokensForModel} max output tokens. The max output tokens have been limited to ${maxOutputTokensForModel}.`
+        });
+      }
+      baseArgs.max_tokens = maxOutputTokensForModel;
+    }
+    if ((anthropicOptions == null ? void 0 : anthropicOptions.container) && anthropicOptions.container.skills && anthropicOptions.container.skills.length > 0) {
+      betas.add("code-execution-2025-08-25");
+      betas.add("skills-2025-10-02");
+      betas.add("files-api-2025-04-14");
+      if (!(tools == null ? void 0 : tools.some(
+        (tool) => tool.type === "provider-defined" && tool.id === "anthropic.code_execution_20250825"
+      ))) {
+        warnings.push({
+          type: "other",
+          message: "code execution tool is required when using skills"
+        });
+      }
     }
     const {
       tools: anthropicTools2,
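
When skills are configured, this block opts the request into the required beta features and only warns, rather than failing, if the code execution tool is missing, so the caller still has to supply that tool. A sketch of a complete call, assuming the provider exposes the 2025-08-25 code execution tool as anthropic.tools.codeExecution_20250825 (an accessor name inferred from the tool id checked above, not confirmed by this diff); model, prompt, and skill id are placeholders:

import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";

const { text } = await generateText({
  model: anthropic("claude-sonnet-4-5"),
  prompt: "Summarize the quarterly numbers into a short slide deck.",
  tools: {
    // Assumed accessor; the check above only looks for the provider-defined
    // tool id "anthropic.code_execution_20250825".
    code_execution: anthropic.tools.codeExecution_20250825(),
  },
  providerOptions: {
    anthropic: {
      container: {
        skills: [{ type: "anthropic", skillId: "pptx" }], // placeholder skill id
      },
    },
  },
});

Because the betas are added here automatically, callers do not need to opt into code-execution-2025-08-25, skills-2025-10-02, or files-api-2025-04-14 themselves.
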
@@ -1778,7 +1828,7 @@ var AnthropicMessagesLanguageModel = class {
         tool_choice: anthropicToolChoice
       },
       warnings: [...warnings, ...toolWarnings],
-      betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas]),
+      betas: /* @__PURE__ */ new Set([...betas, ...toolsBetas]),
       usesJsonResponseTool: jsonResponseTool != null
     };
   }
@@ -2533,6 +2583,17 @@ var AnthropicMessagesLanguageModel = class {
       };
     }
   };
+function getMaxOutputTokensForModel(modelId) {
+  if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return 64e3;
+  } else if (modelId.includes("claude-opus-4-")) {
+    return 32e3;
+  } else if (modelId.includes("claude-3-5-haiku")) {
+    return 8192;
+  } else {
+    return 4096;
+  }
+}
 
 // src/tool/bash_20241022.ts
 import {
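
This helper replaces the old hardcoded 4096 default: when the caller omits maxOutputTokens, getArgs now falls back to the model's output ceiling, and an explicit value (plus any thinking budget) is clamped back to that ceiling with a warning. A standalone restatement of the arithmetic for tracing purposes, with the constants copied from the function above (an illustration, not the bundled code itself):

function resolveMaxTokens(modelId, maxOutputTokens, thinkingBudget) {
  const cap =
    modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5") ? 64e3 :
    modelId.includes("claude-opus-4-") ? 32e3 :
    modelId.includes("claude-3-5-haiku") ? 8192 : 4096;
  // No explicit limit: fall back to the per-model cap.
  let maxTokens = maxOutputTokens != null ? maxOutputTokens : cap;
  // With extended thinking, the budget is added on top ...
  if (thinkingBudget != null) maxTokens += thinkingBudget;
  // ... and the result is clamped to the cap (the SDK also warns when an
  // explicit maxOutputTokens caused the overflow).
  return Math.min(maxTokens, cap);
}

resolveMaxTokens("claude-sonnet-4-5");              // 64000: no explicit limit, no thinking
resolveMaxTokens("claude-opus-4-1", 30000, 10000);  // 32000: 40000 clamped to the Opus cap
resolveMaxTokens("claude-3-5-haiku-latest", 4000);  // 4000: under the 8192 cap, passed through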