@ai-sdk/anthropic 3.0.0-beta.56 → 3.0.0-beta.58
This diff compares the publicly released contents of the two package versions as published to their registry and is provided for informational purposes only.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +11 -1
- package/dist/index.d.ts +11 -1
- package/dist/index.js +89 -23
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +89 -23
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +1 -1
- package/dist/internal/index.d.ts +1 -1
- package/dist/internal/index.js +88 -22
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +88 -22
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
@@ -2,7 +2,7 @@ import { LanguageModelV3, JSONSchema7, SharedV3ProviderMetadata, LanguageModelV3
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { Resolvable, FetchFunction } from '@ai-sdk/provider-utils';
 
-type AnthropicMessagesModelId = 'claude-haiku-
+type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | (string & {});
 
 type AnthropicMessagesConfig = {
     provider: string;
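The replacement `AnthropicMessagesModelId` union above now includes identifiers such as 'claude-opus-4-5' and 'claude-opus-4-5-20251101'; because the removed line is truncated, the exact set of additions is not visible here. A minimal usage sketch, assuming the standard AI SDK entry points (`anthropic` from this package and `generateText` from `ai`), which are not part of this diff:

```ts
import { anthropic } from '@ai-sdk/anthropic';
import { generateText } from 'ai';

// One of the model IDs present in the widened union, e.g. 'claude-opus-4-5'.
const { text } = await generateText({
  model: anthropic('claude-opus-4-5'),
  prompt: 'Summarize this changelog entry in one sentence.',
});
```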
package/dist/internal/index.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { LanguageModelV3, JSONSchema7, SharedV3ProviderMetadata, LanguageModelV3
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { Resolvable, FetchFunction } from '@ai-sdk/provider-utils';
 
-type AnthropicMessagesModelId = 'claude-haiku-
+type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | (string & {});
 
 type AnthropicMessagesConfig = {
     provider: string;
package/dist/internal/index.js
CHANGED
@@ -586,6 +586,14 @@ var anthropicProviderOptions = import_v43.z.object({
    * This allows you to deactivate reasoning inputs for models that do not support them.
    */
   sendReasoning: import_v43.z.boolean().optional(),
+  /**
+   * Determines how structured outputs are generated.
+   *
+   * - `outputFormat`: Use the `output_format` parameter to specify the structured output format.
+   * - `jsonTool`: Use a special 'json' tool to specify the structured output format.
+   * - `auto`: Use 'outputFormat' when supported, otherwise use 'jsonTool' (default).
+   */
+  structuredOutputMode: import_v43.z.enum(["outputFormat", "jsonTool", "auto"]).optional(),
   /**
    * Configuration for enabling Claude's extended thinking.
    *
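A usage sketch for the new `structuredOutputMode` option. The option name and enum values come from the schema above; the `generateObject`/`providerOptions` plumbing is standard AI SDK usage assumed here, not shown in this diff:

```ts
import { anthropic } from '@ai-sdk/anthropic';
import { generateObject } from 'ai';
import { z } from 'zod';

const { object } = await generateObject({
  model: anthropic('claude-opus-4-5'),
  schema: z.object({ title: z.string(), tags: z.array(z.string()) }),
  providerOptions: {
    anthropic: {
      // 'auto' (default): use output_format when the model supports it, else the 'json' tool.
      // 'outputFormat' forces the native path; 'jsonTool' forces the tool fallback.
      structuredOutputMode: 'outputFormat',
    },
  },
  prompt: 'Extract a title and tags from the release notes.',
});
```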
@@ -647,7 +655,11 @@ var anthropicProviderOptions = import_v43.z.object({
    *
    * @default true
    */
-  toolStreaming: import_v43.z.boolean().optional()
+  toolStreaming: import_v43.z.boolean().optional(),
+  /**
+   * @default 'high'
+   */
+  effort: import_v43.z.enum(["low", "medium", "high"]).optional()
 });
 
 // src/anthropic-prepare-tools.ts
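The new `effort` option accepts 'low' | 'medium' | 'high' and documents a default of 'high'. A sketch of how it would be passed, again assuming standard `providerOptions` usage outside this diff:

```ts
import { anthropic } from '@ai-sdk/anthropic';
import { generateText } from 'ai';

const { text } = await generateText({
  model: anthropic('claude-opus-4-5'),
  providerOptions: {
    anthropic: { effort: 'low' }, // omit to keep the documented default of 'high'
  },
  prompt: 'Give a one-sentence summary of this diff.',
});
```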
@@ -1899,7 +1911,7 @@ var AnthropicMessagesLanguageModel = class {
     providerOptions,
     stream
   }) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -1943,27 +1955,33 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
-      type: "function",
-      name: "json",
-      description: "Respond with a JSON object.",
-      inputSchema: responseFormat.schema
-    } : void 0;
     const anthropicOptions = await (0, import_provider_utils11.parseProviderOptions)({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptions
     });
+    const {
+      maxOutputTokens: maxOutputTokensForModel,
+      supportsStructuredOutput,
+      isKnownModel
+    } = getModelCapabilities(this.modelId);
+    const structureOutputMode = (_a = anthropicOptions == null ? void 0 : anthropicOptions.structuredOutputMode) != null ? _a : "auto";
+    const useStructuredOutput = structureOutputMode === "outputFormat" || structureOutputMode === "auto" && supportsStructuredOutput;
+    const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !useStructuredOutput ? {
+      type: "function",
+      name: "json",
+      description: "Respond with a JSON object.",
+      inputSchema: responseFormat.schema
+    } : void 0;
     const cacheControlValidator = new CacheControlValidator();
     const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
       prompt,
-      sendReasoning: (
+      sendReasoning: (_b = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _b : true,
       warnings,
       cacheControlValidator
     });
-    const isThinking = ((
-    const thinkingBudget = (
-    const { maxOutputTokens: maxOutputTokensForModel, knownModel } = getMaxOutputTokensForModel(this.modelId);
+    const isThinking = ((_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.type) === "enabled";
+    const thinkingBudget = (_d = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _d.budgetTokens;
     const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
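For readability, the mode-resolution logic introduced in the hunk above can be summarized as the following standalone sketch; the names mirror the bundled code but nothing here is exported API:

```ts
type StructuredOutputMode = 'outputFormat' | 'jsonTool' | 'auto';

function resolveUseStructuredOutput(
  mode: StructuredOutputMode | undefined,
  supportsStructuredOutput: boolean,
): boolean {
  const resolved = mode ?? 'auto'; // unset option falls back to 'auto'
  // 'outputFormat' always uses the native output_format path; 'auto' uses it only
  // for models whose capabilities report support; 'jsonTool' never does.
  return resolved === 'outputFormat' || (resolved === 'auto' && supportsStructuredOutput);
}

// When this resolves to false and a JSON response format with a schema is requested,
// the provider keeps building the synthetic 'json' tool (jsonResponseTool above).
```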
@@ -1978,6 +1996,16 @@ var AnthropicMessagesLanguageModel = class {
       ...isThinking && {
         thinking: { type: "enabled", budget_tokens: thinkingBudget }
       },
+      ...(anthropicOptions == null ? void 0 : anthropicOptions.effort) && {
+        output_config: { effort: anthropicOptions.effort }
+      },
+      // structured output:
+      ...useStructuredOutput && (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && {
+        output_format: {
+          type: "json_schema",
+          schema: responseFormat.schema
+        }
+      },
       // mcp servers:
       ...(anthropicOptions == null ? void 0 : anthropicOptions.mcpServers) && anthropicOptions.mcpServers.length > 0 && {
         mcp_servers: anthropicOptions.mcpServers.map((server) => ({
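An illustrative sketch (not taken from this diff) of what the two new spreads contribute to the Messages API request body; the `output_config` and `output_format` field names come from the hunk above, while the surrounding fields are abbreviated:

```ts
const requestBodySketch = {
  model: 'claude-opus-4-5',
  max_tokens: 4096,
  // present only when providerOptions.anthropic.effort is set:
  output_config: { effort: 'high' },
  // present only when the native structured-output path is active:
  output_format: {
    type: 'json_schema',
    schema: {
      type: 'object',
      properties: { title: { type: 'string' } },
      required: ['title'],
    },
  },
  messages: [{ role: 'user', content: 'Extract the title.' }],
};
```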
@@ -1995,7 +2023,7 @@ var AnthropicMessagesLanguageModel = class {
       ...(anthropicOptions == null ? void 0 : anthropicOptions.container) && {
         container: {
           id: anthropicOptions.container.id,
-          skills: (
+          skills: (_e = anthropicOptions.container.skills) == null ? void 0 : _e.map((skill) => ({
             type: skill.type,
             skill_id: skill.skillId,
             version: skill.version
@@ -2038,7 +2066,7 @@ var AnthropicMessagesLanguageModel = class {
       }
       baseArgs.max_tokens = maxTokens + thinkingBudget;
     }
-    if (
+    if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
       if (maxOutputTokens != null) {
         warnings.push({
           type: "unsupported-setting",
@@ -2064,9 +2092,15 @@ var AnthropicMessagesLanguageModel = class {
         });
       }
     }
-    if (
+    if (anthropicOptions == null ? void 0 : anthropicOptions.effort) {
+      betas.add("effort-2025-11-24");
+    }
+    if (stream && ((_f = anthropicOptions == null ? void 0 : anthropicOptions.toolStreaming) != null ? _f : true)) {
       betas.add("fine-grained-tool-streaming-2025-05-14");
     }
+    if (useStructuredOutput) {
+      betas.add("structured-outputs-2025-11-13");
+    }
     const {
       tools: anthropicTools2,
       toolChoice: anthropicToolChoice,
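The hunk above adds two beta flags alongside the existing tool-streaming flag. Assuming the collected `betas` set is forwarded as the `anthropic-beta` request header (the usual mechanism for the Anthropic API; the forwarding itself is not shown in this diff), a request using all three features would carry:

```ts
const anthropicBetaHeader = [
  'effort-2025-11-24',                      // added when `effort` is set
  'fine-grained-tool-streaming-2025-05-14', // added when streaming with toolStreaming !== false
  'structured-outputs-2025-11-13',          // added when useStructuredOutput is true
].join(',');
```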
@@ -2943,17 +2977,49 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
 };
-function
-if (modelId.includes("claude-sonnet-4-
-  return {
+function getModelCapabilities(modelId) {
+  if (modelId.includes("claude-sonnet-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-opus-4-1") || modelId.includes("claude-opus-4-5")) {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: true,
+      isKnownModel: true
+    };
+  } else if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return {
+      maxOutputTokens: 64e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-opus-4-")) {
-  return {
+    return {
+      maxOutputTokens: 32e3,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-5-haiku")) {
-  return {
+    return {
+      maxOutputTokens: 8192,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else if (modelId.includes("claude-3-haiku")) {
-  return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: true
+    };
   } else {
-  return {
+    return {
+      maxOutputTokens: 4096,
+      supportsStructuredOutput: false,
+      isKnownModel: false
+    };
   }
 }
 
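The capability table implied by the branches of `getModelCapabilities` above, written out as an illustrative sketch (the helper is internal to the bundle and not exported):

```ts
type ModelCapabilities = {
  maxOutputTokens: number;
  supportsStructuredOutput: boolean;
  isKnownModel: boolean;
};

// Expected results for a few representative model IDs, per the branches above.
const examples: Record<string, ModelCapabilities> = {
  'claude-sonnet-4-5':       { maxOutputTokens: 64000, supportsStructuredOutput: true,  isKnownModel: true },
  'claude-opus-4-5':         { maxOutputTokens: 32000, supportsStructuredOutput: true,  isKnownModel: true },
  'claude-haiku-4-5':        { maxOutputTokens: 64000, supportsStructuredOutput: false, isKnownModel: true },
  'claude-3-5-haiku-latest': { maxOutputTokens: 8192,  supportsStructuredOutput: false, isKnownModel: true },
  'some-unknown-model':      { maxOutputTokens: 4096,  supportsStructuredOutput: false, isKnownModel: false },
};
```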