@ai-sdk/anthropic 3.0.0-beta.55 → 3.0.0-beta.57
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +92 -22
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +92 -22
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +91 -21
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +91 -21
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.js
CHANGED

@@ -580,7 +580,26 @@ var anthropicFilePartProviderOptions = import_v43.z.object({
   context: import_v43.z.string().optional()
 });
 var anthropicProviderOptions = import_v43.z.object({
+  /**
+   * Whether to send reasoning to the model.
+   *
+   * This allows you to deactivate reasoning inputs for models that do not support them.
+   */
   sendReasoning: import_v43.z.boolean().optional(),
+  /**
+   * Determines how structured outputs are generated.
+   *
+   * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+   * - `jsonTool`: Use a special 'json' tool to specify the structured output format.
+   * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool' (default).
+   */
+  structuredOutputMode: import_v43.z.enum(["outputFormat", "jsonTool", "auto"]).optional(),
+  /**
+   * Configuration for enabling Claude's extended thinking.
+   *
+   * When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer.
+   * Requires a minimum budget of 1,024 tokens and counts towards the `max_tokens` limit.
+   */
   thinking: import_v43.z.object({
     type: import_v43.z.union([import_v43.z.literal("enabled"), import_v43.z.literal("disabled")]),
     budgetTokens: import_v43.z.number().optional()
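These options are passed per request through providerOptions.anthropic. A minimal usage sketch (the generateText call and model id are assumptions; only the option names and values come from the schema above):

import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

// Sketch: option names and values match the anthropicProviderOptions schema above.
const result = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  prompt: 'Summarize the attached report as JSON.',
  providerOptions: {
    anthropic: {
      sendReasoning: true, // forward reasoning content to the model
      structuredOutputMode: 'auto', // 'outputFormat' | 'jsonTool' | 'auto'
      thinking: { type: 'enabled', budgetTokens: 2048 }, // >= 1024, counted against max_tokens
    },
  },
});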
@@ -598,6 +617,9 @@ var anthropicProviderOptions = import_v43.z.object({
     type: import_v43.z.literal("ephemeral"),
     ttl: import_v43.z.union([import_v43.z.literal("5m"), import_v43.z.literal("1h")]).optional()
   }).optional(),
+  /**
+   * MCP servers to be utilized in this request.
+   */
   mcpServers: import_v43.z.array(
     import_v43.z.object({
       type: import_v43.z.literal("url"),
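The new doc comment covers the existing mcpServers option, which is forwarded as mcp_servers later in this file. Only the type: 'url' discriminator is visible in this hunk; the url and name fields in this sketch are assumptions based on Anthropic's MCP connector:

import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

// Sketch: only `type: 'url'` is confirmed by the schema above; url/name are assumed fields.
await generateText({
  model: anthropic('claude-sonnet-4-5'),
  prompt: 'Answer using the docs server.',
  providerOptions: {
    anthropic: {
      mcpServers: [{ type: 'url', url: 'https://example.com/mcp', name: 'docs' }],
    },
  },
});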
@@ -1885,7 +1907,7 @@ var AnthropicMessagesLanguageModel = class {
     providerOptions,
     stream
   }) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -1929,27 +1951,33 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
-      type: "function",
-      name: "json",
-      description: "Respond with a JSON object.",
-      inputSchema: responseFormat.schema
-    } : void 0;
     const anthropicOptions = await (0, import_provider_utils11.parseProviderOptions)({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptions
     });
+    const {
+      maxOutputTokens: maxOutputTokensForModel,
+      supportsStructuredOutput,
+      isKnownModel
+    } = getModelCapabilities(this.modelId);
+    const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "auto";
+    const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+      type: "function",
+      name: "json",
+      description: "Respond with a JSON object.",
+      inputSchema: responseFormat.schema
+    } : void 0;
     const cacheControlValidator = new CacheControlValidator();
     const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
       prompt,
-      sendReasoning: (
+      sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
       warnings,
       cacheControlValidator
     });
-    const isThinking = ((
-    const thinkingBudget = (
-    const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+    const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+    const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
     const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
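The added lines resolve the structured-output strategy before building the request: an explicit structuredOutputMode wins, and 'auto' defers to the model capability table added at the bottom of this file. Restated as a standalone sketch (names are local to the sketch):

type StructuredOutputMode = 'outputFormat' | 'jsonTool' | 'auto';

// Mirrors the resolution above: explicit 'outputFormat' always uses output_format,
// 'auto' uses it only when the model supports it, and 'jsonTool' never does.
function resolveUseStructuredOutput(
  mode: StructuredOutputMode | undefined,
  supportsStructuredOutput: boolean,
): boolean {
  const resolved = mode ?? 'auto';
  return resolved === 'outputFormat' || (resolved === 'auto' && supportsStructuredOutput);
}

// resolveUseStructuredOutput(undefined, true)  -> true  (send output_format)
// resolveUseStructuredOutput('jsonTool', true) -> false (fall back to the synthetic 'json' tool)

When the result is true, the synthetic jsonResponseTool is skipped and output_format is sent instead (see the next hunks).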
@@ -1964,6 +1992,13 @@ var AnthropicMessagesLanguageModel = class {
       ...isThinking && {
         thinking: { type: "enabled", budget_tokens: thinkingBudget }
       },
+      // structured output:
+      ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+        output_format: {
+          type: "json_schema",
+          schema: responseFormat.schema
+        }
+      },
       // mcp servers:
       ...(anthropicOptions == null ? void 0 : anthropicOptions.mcpServers) && anthropicOptions.mcpServers.length > 0 && {
         mcp_servers: anthropicOptions.mcpServers.map((server) => ({
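With useStructuredOutput active and a JSON response format, the request body now carries output_format instead of the 'json' tool. An illustrative fragment of the resulting Messages API payload (the example schema is made up; in practice it comes from responseFormat.schema):

// Illustrative request-body fragment produced by the spread above.
const requestFragment = {
  output_format: {
    type: 'json_schema',
    schema: {
      type: 'object',
      properties: { title: { type: 'string' } },
      required: ['title'],
    },
  },
};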
@@ -1981,7 +2016,7 @@ var AnthropicMessagesLanguageModel = class {
       ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
         container: {
           id: anthropicOptions.container.id,
-          skills: (
+          skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
             type: skill.type,
             skill_id: skill.skillId,
             version: skill.version
@@ -2024,7 +2059,7 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    if (
+    if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
       if (maxOutputTokens != null) {
         warnings.push({
           type: "unsupported-setting",
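The renamed isKnownModel flag keeps the existing behavior of only warning about max_tokens overruns for models in the capability table. A worked example of the guard, using values from that table:

// Worked example of the condition above (64_000 is the claude-sonnet-4-5 cap from the table below).
const maxOutputTokensForModel = 64_000;
const maxTokens = 63_000;             // caller-provided maxOutputTokens
const thinkingBudget = 2_048;         // added to max_tokens when thinking is enabled

const requestedMaxTokens = maxTokens + thinkingBudget;              // 65_048
const exceedsLimit = requestedMaxTokens > maxOutputTokensForModel;  // true -> warning is pushed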
@@ -2050,9 +2085,12 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    if (stream && ((
+    if (stream && ((_f = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _f : true)) {
       betas.add("fine-grained-tool-streaming-2025-05-14");
     }
+    if (useStructuredOutput) {
+      betas.add("structured-outputs-2025-11-13");
+    }
     const {
       tools: anthropicTools2,
       toolChoice: anthropicToolChoice,
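Both betas accumulate in the betas set and are sent with the request; the structured-outputs beta is added whenever output_format will be used. A sketch of how such a set typically maps onto the anthropic-beta header (the header name is Anthropic's standard beta mechanism and is not shown in this hunk):

// Sketch: turning the accumulated betas into a request header value.
const betas = new Set<string>([
  'fine-grained-tool-streaming-2025-05-14', // streaming with tool streaming enabled
  'structured-outputs-2025-11-13',          // useStructuredOutput is true
]);
const headers = { 'anthropic-beta': Array.from(betas).join(',') };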
@@ -2929,17 +2967,49 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
 };
-function
-  if (modelId.includes("claude-sonnet-4-
-    return {
+function getModelCapabilities(modelId) {
+  if (modelId.includes("claude-sonnet-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-opus-4-1")) {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-opus-4-")) {
-    return {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-5-haiku")) {
-    return {
+    return {
+      maxOutputTokens: 8192,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-haiku")) {
-    return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else {
-    return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: false
+    };
   }
 }
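getModelCapabilities replaces the old getMaxOutputTokensForModel helper, bundling the token cap with structured-output support and the known-model flag. Expected results for a few ids, read directly from the branches above:

// Read from the branch order above; unmatched ids fall through to the final defaults.
getModelCapabilities('claude-sonnet-4-5');
// -> { maxOutputTokens: 64000, supportsStructuredOutput: true, isKnownModel: true }
getModelCapabilities('claude-3-5-haiku');
// -> { maxOutputTokens: 8192, supportsStructuredOutput: false, isKnownModel: true }
getModelCapabilities('some-future-model');
// -> { maxOutputTokens: 4096, supportsStructuredOutput: false, isKnownModel: false }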