@ai-sdk/anthropic 3.0.0-beta.25 → 3.0.0-beta.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +27 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +27 -5
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +1 -1
- package/dist/internal/index.d.ts +1 -1
- package/dist/internal/index.js +26 -4
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +26 -4
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/anthropic
 
+## 3.0.0-beta.27
+
+### Patch Changes
+
+- 4c5a6be: feat(provider/anthropic): default and limit maxTokens based on model
+
+## 3.0.0-beta.26
+
+### Patch Changes
+
+- f33a018: chore: add model ID for Haiku 4.5
+
 ## 3.0.0-beta.25
 
 ### Patch Changes
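Taken together, beta.26 and beta.27 add the Haiku 4.5 model IDs and replace the fixed 4096-token default for maxTokens with a per-model default and cap. A minimal usage sketch, assuming the AI SDK v5 `generateText` API from the `ai` package (which is outside this diff):

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const { text } = await generateText({
  // model ID added in 3.0.0-beta.26
  model: anthropic('claude-haiku-4-5'),
  prompt: 'Summarize the latest release notes.',
  // maxOutputTokens is omitted on purpose: since 3.0.0-beta.27 the provider
  // defaults it to the model's maximum output tokens (64000 for Haiku 4.5)
  // instead of a fixed 4096, and clamps explicit values above the model limit.
});
```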
package/dist/index.d.mts
CHANGED
@@ -3,7 +3,7 @@ import { ProviderV3, LanguageModelV3 } from '@ai-sdk/provider';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type AnthropicMessagesModelId = 'claude-sonnet-4-5' | 'claude-sonnet-4-5-20250929' | 'claude-opus-4-1' | 'claude-opus-4-0' | 'claude-sonnet-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-20250514' | 'claude-sonnet-4-20250514' | 'claude-3-7-sonnet-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-haiku-20240307' | (string & {});
+type AnthropicMessagesModelId = 'claude-haiku-4-5' | 'claude-haiku-4-5-20251001' | 'claude-sonnet-4-5' | 'claude-sonnet-4-5-20250929' | 'claude-opus-4-1' | 'claude-opus-4-0' | 'claude-sonnet-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-20250514' | 'claude-sonnet-4-20250514' | 'claude-3-7-sonnet-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-haiku-20240307' | (string & {});
 declare const anthropicProviderOptions: z.ZodObject<{
     sendReasoning: z.ZodOptional<z.ZodBoolean>;
     thinking: z.ZodOptional<z.ZodObject<{
package/dist/index.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { ProviderV3, LanguageModelV3 } from '@ai-sdk/provider';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type AnthropicMessagesModelId = 'claude-sonnet-4-5' | 'claude-sonnet-4-5-20250929' | 'claude-opus-4-1' | 'claude-opus-4-0' | 'claude-sonnet-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-20250514' | 'claude-sonnet-4-20250514' | 'claude-3-7-sonnet-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-haiku-20240307' | (string & {});
+type AnthropicMessagesModelId = 'claude-haiku-4-5' | 'claude-haiku-4-5-20251001' | 'claude-sonnet-4-5' | 'claude-sonnet-4-5-20250929' | 'claude-opus-4-1' | 'claude-opus-4-0' | 'claude-sonnet-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-20250514' | 'claude-sonnet-4-20250514' | 'claude-3-7-sonnet-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-5-haiku-latest' | 'claude-3-5-haiku-20241022' | 'claude-3-haiku-20240307' | (string & {});
 declare const anthropicProviderOptions: z.ZodObject<{
     sendReasoning: z.ZodOptional<z.ZodBoolean>;
     thinking: z.ZodOptional<z.ZodObject<{
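Both declaration files widen `AnthropicMessagesModelId` in the same way. A trimmed, type-level sketch of what the change accepts (the union is abbreviated here for illustration; only the Haiku 4.5 entries are new):

```ts
// Abbreviated copy of the union from the declaration files above.
type AnthropicMessagesModelId =
  | 'claude-haiku-4-5'          // new floating alias
  | 'claude-haiku-4-5-20251001' // new dated snapshot
  | 'claude-sonnet-4-5'
  // ...existing IDs unchanged...
  | (string & {});              // arbitrary strings still allowed, without losing autocomplete

const pinned: AnthropicMessagesModelId = 'claude-haiku-4-5-20251001';
const latest: AnthropicMessagesModelId = 'claude-haiku-4-5';
```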
package/dist/index.js
CHANGED
@@ -31,7 +31,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils20 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.25" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.27" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -1639,8 +1639,7 @@ var AnthropicMessagesLanguageModel = class {
   }
   async getArgs({
     prompt,
-    maxOutputTokens = 4096,
-    // 4096: max model output tokens TODO update default in v5
+    maxOutputTokens,
     temperature,
     topP,
     topK,
@@ -1706,11 +1705,13 @@ var AnthropicMessagesLanguageModel = class {
     });
     const isThinking = ((_b = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _b.type) === "enabled";
     const thinkingBudget = (_c = anthropicOptions == null ? void 0 : anthropicOptions.thinking) == null ? void 0 : _c.budgetTokens;
+    const maxOutputTokensForModel = getMaxOutputTokensForModel(this.modelId);
+    const maxTokens = maxOutputTokens != null ? maxOutputTokens : maxOutputTokensForModel;
     const baseArgs = {
       // model id:
       model: this.modelId,
       // standardized settings:
-      max_tokens: maxOutputTokens,
+      max_tokens: maxTokens,
       temperature,
       top_k: topK,
       top_p: topP,
@@ -1753,7 +1754,17 @@ var AnthropicMessagesLanguageModel = class {
           details: "topP is not supported when thinking is enabled"
         });
       }
-      baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
+      baseArgs.max_tokens = maxTokens + thinkingBudget;
+    }
+    if (baseArgs.max_tokens > maxOutputTokensForModel) {
+      if (maxOutputTokens != null) {
+        warnings.push({
+          type: "unsupported-setting",
+          setting: "maxOutputTokens",
+          details: `${maxTokens} (maxOutputTokens + thinkingBudget) is greater than ${this.modelId} ${maxOutputTokensForModel} max output tokens. The max output tokens have been limited to ${maxOutputTokensForModel}.`
+        });
+      }
+      baseArgs.max_tokens = maxOutputTokensForModel;
     }
     const {
       tools: anthropicTools2,
@@ -2533,6 +2544,17 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
 };
+function getMaxOutputTokensForModel(modelId) {
+  if (modelId.includes("claude-sonnet-4-") || modelId.includes("claude-3-7-sonnet") || modelId.includes("claude-haiku-4-5")) {
+    return 64e3;
+  } else if (modelId.includes("claude-opus-4-")) {
+    return 32e3;
+  } else if (modelId.includes("claude-3-5-haiku")) {
+    return 8192;
+  } else {
+    return 4096;
+  }
+}
 
 // src/tool/bash_20241022.ts
 var import_provider_utils12 = require("@ai-sdk/provider-utils");