@ai-sdk/anthropic 3.0.0-beta.55 → 3.0.0-beta.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +92 -22
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +92 -22
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +91 -21
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +91 -21
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.mjs
CHANGED
@@ -566,7 +566,26 @@ var anthropicFilePartProviderOptions = z3.object({
   context: z3.string().optional()
 });
 var anthropicProviderOptions = z3.object({
+  /**
+   * Whether to send reasoning to the model.
+   *
+   * This allows you to deactivate reasoning inputs for models that do not support them.
+   */
   sendReasoning: z3.boolean().optional(),
+  /**
+   * Determines how structured outputs are generated.
+   *
+   * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+   * - `jsonTool`: Use a special 'json' tool to specify the structured output format.
+   * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool' (default).
+   */
+  structuredOutputMode: z3.enum(["outputFormat", "jsonTool", "auto"]).optional(),
+  /**
+   * Configuration for enabling Claude's extended thinking.
+   *
+   * When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer.
+   * Requires a minimum budget of 1,024 tokens and counts towards the `max_tokens` limit.
+   */
   thinking: z3.object({
     type: z3.union([z3.literal("enabled"), z3.literal("disabled")]),
     budgetTokens: z3.number().optional()
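The options documented above are regular per-request provider options. A minimal usage sketch, assuming the standard `generateObject` call from the `ai` package; the model id, prompt, and schema are illustrative placeholders:

```ts
import { generateObject } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';
import { z } from 'zod';

// Illustrative call: model id, prompt, and schema are placeholders.
const { object } = await generateObject({
  model: anthropic('claude-sonnet-4-5'),
  schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
  prompt: 'Summarize the release notes as structured data.',
  providerOptions: {
    anthropic: {
      // new in this release: choose how structured output is produced
      structuredOutputMode: 'outputFormat', // or 'jsonTool' / 'auto' (default)
      // existing option, now documented: skip reasoning inputs for models that reject them
      sendReasoning: false,
    },
  },
});
```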
@@ -584,6 +603,9 @@ var anthropicProviderOptions = z3.object({
     type: z3.literal("ephemeral"),
     ttl: z3.union([z3.literal("5m"), z3.literal("1h")]).optional()
   }).optional(),
+  /**
+   * MCP servers to be utilized in this request.
+   */
   mcpServers: z3.array(
     z3.object({
       type: z3.literal("url"),
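Only the `type: 'url'` field of each `mcpServers` entry is visible in this hunk. The `url` and `name` fields in the sketch below are assumptions based on Anthropic's MCP connector request shape, not confirmed by this diff:

```ts
// Hypothetical entry shape: only `type: 'url'` appears in the hunk above;
// `url` and `name` are assumed fields.
const providerOptions = {
  anthropic: {
    mcpServers: [{ type: 'url', url: 'https://example.com/mcp', name: 'example' }],
  },
};
```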
@@ -1896,7 +1918,7 @@ var AnthropicMessagesLanguageModel = class {
     providerOptions,
     stream
   }) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -1940,27 +1962,33 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
-      type: "function",
-      name: "json",
-      description: "Respond with a JSON object.",
-      inputSchema: responseFormat.schema
-    } : void 0;
     const anthropicOptions = await parseProviderOptions2({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptions
     });
+    const {
+      maxOutputTokens: maxOutputTokensForModel,
+      supportsStructuredOutput,
+      isKnownModel
+    } = getModelCapabilities(this.modelId);
+    const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "auto";
+    const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+      type: "function",
+      name: "json",
+      description: "Respond with a JSON object.",
+      inputSchema: responseFormat.schema
+    } : void 0;
     const cacheControlValidator = new CacheControlValidator();
     const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
       prompt,
-      sendReasoning: (
+      sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
       warnings,
       cacheControlValidator
     });
-    const isThinking = ((
-    const thinkingBudget = (
-    const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+    const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+    const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
     const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
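De-minified, the new selection logic reads: default `structuredOutputMode` to 'auto', take the native `output_format` path when the mode is 'outputFormat', or when it is 'auto' and the model supports structured output, and only then fall back to the synthetic 'json' tool. A readable restatement (the helper name below is ours, for illustration; it is not exported by the package):

```ts
// Restates the selection logic from the bundle above; not the shipped source.
type StructuredOutputMode = 'outputFormat' | 'jsonTool' | 'auto';

function resolveUseStructuredOutput(
  mode: StructuredOutputMode | undefined,
  supportsStructuredOutput: boolean,
): boolean {
  const effective = mode ?? 'auto';
  return effective === 'outputFormat' || (effective === 'auto' && supportsStructuredOutput);
}

// The 'json' response tool is now only constructed when this returns false.
```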
@@ -1975,6 +2003,13 @@ var AnthropicMessagesLanguageModel = class {
       ...isThinking && {
         thinking: { type: "enabled", budget_tokens: thinkingBudget }
       },
+      // structured output:
+      ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+        output_format: {
+          type: "json_schema",
+          schema: responseFormat.schema
+        }
+      },
       // mcp servers:
       ...(anthropicOptions == null ? void 0 : anthropicOptions.mcpServers) && anthropicOptions.mcpServers.length > 0 && {
         mcp_servers: anthropicOptions.mcpServers.map((server) => ({
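When the spread conditions above hold, the request body gains fragments like the following; only the field names come from this diff, the budget and schema values are illustrative:

```ts
// Illustrative fragments contributed by the spreads above.
const bodyFragments = {
  thinking: { type: 'enabled', budget_tokens: 2048 },
  output_format: {
    type: 'json_schema',
    schema: { type: 'object', properties: { title: { type: 'string' } } },
  },
};
```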
@@ -1992,7 +2027,7 @@ var AnthropicMessagesLanguageModel = class {
       ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
         container: {
           id: anthropicOptions.container.id,
-          skills: (
+          skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
             type: skill.type,
             skill_id: skill.skillId,
             version: skill.version
@@ -2035,7 +2070,7 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    if (
+    if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
       if (maxOutputTokens != null) {
         warnings.push({
           type: "unsupported-setting",
@@ -2061,9 +2096,12 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    if (stream && ((
+    if (stream && ((_f = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _f : true)) {
       betas.add("fine-grained-tool-streaming-2025-05-14");
     }
+    if (useStructuredOutput) {
+      betas.add("structured-outputs-2025-11-13");
+    }
     const {
       tools: anthropicTools2,
       toolChoice: anthropicToolChoice,
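The structured output path also opts the request into a beta feature. Assuming the accumulated `betas` set is forwarded through the `anthropic-beta` request header, the same way existing beta flags such as fine-grained tool streaming are sent, the effect is roughly:

```ts
// Sketch only: joining betas into the anthropic-beta header is an assumption
// about how the accumulated set is transmitted.
const betas = new Set<string>();
const useStructuredOutput = true; // e.g. claude-sonnet-4-5 with a JSON response format
if (useStructuredOutput) {
  betas.add('structured-outputs-2025-11-13');
}
const headers: Record<string, string> =
  betas.size > 0 ? { 'anthropic-beta': [...betas].join(',') } : {};
```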
@@ -2940,17 +2978,49 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
 };
-function
-  if (modelId.includes("claude-sonnet-4-
-    return {
+function getModelCapabilities(modelId) {
+  if (modelId.includes("claude-sonnet-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-opus-4-1")) {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-opus-4-")) {
-    return {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-5-haiku")) {
-    return {
+    return {
+      maxOutputTokens: 8192,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-haiku")) {
-    return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else {
-    return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: false
+    };
   }
 }
 
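For reference, the branches of the new internal `getModelCapabilities` helper resolve as follows for a few illustrative model ids (64e3 and 32e3 in the bundle are 64,000 and 32,000 tokens):

```ts
// Expected results per the substring checks above; the ids are illustrative.
const expectations = {
  'claude-sonnet-4-5': { maxOutputTokens: 64000, supportsStructuredOutput: true, isKnownModel: true },
  'claude-3-5-haiku-latest': { maxOutputTokens: 8192, supportsStructuredOutput: false, isKnownModel: true },
  'my-custom-model': { maxOutputTokens: 4096, supportsStructuredOutput: false, isKnownModel: false },
};
```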