@ai-sdk/anthropic 4.0.0-beta.3 → 4.0.0-beta.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -122,14 +122,22 @@ The following optional provider options are available for Anthropic models:
122
122
  If you are experiencing issues with the model handling requests involving
123
123
  reasoning content, you can set this to `false` to omit them from the request.
124
124
 
125
- - `effort` _"high" | "medium" | "low"_
125
+ - `effort` _"low" | "medium" | "high" | "xhigh" | "max"_
126
126
 
127
127
  Optional. See [Effort section](#effort) for more details.
128
128
 
129
+ - `taskBudget` _object_
130
+
131
+ Optional. See [Task Budgets section](#task-budgets) for more details.
132
+
129
133
  - `speed` _"fast" | "standard"_
130
134
 
131
135
  Optional. See [Fast Mode section](#fast-mode) for more details.
132
136
 
137
+ - `inferenceGeo` _"us" | "global"_
138
+
139
+ Optional. See [Data Residency section](#data-residency) for more details.
140
+
133
141
  - `thinking` _object_
134
142
 
135
143
  Optional. See [Reasoning section](#reasoning) for more details.
@@ -146,6 +154,12 @@ The following optional provider options are available for Anthropic models:
146
154
  - `"jsonTool"`: Use a special `"json"` tool to specify the structured output format.
147
155
  - `"auto"`: Use `"outputFormat"` when supported, otherwise fall back to `"jsonTool"` (default).
148
156
 
157
+ - `metadata` _object_
158
+
159
+ Optional. Metadata to include with the request. See the [Anthropic API documentation](https://platform.claude.com/docs/en/api/messages/create) for details.
160
+
161
+ - `userId` _string_ - An external identifier for the end-user. Should be a UUID, hash, or other opaque identifier. Must not contain PII.
162
+
149
163
  ### Structured Outputs and Tool Input Streaming
150
164
 
151
165
  Tool call streaming is enabled by default. You can opt out by setting the
@@ -177,7 +191,7 @@ const result = streamText({
177
191
 
178
192
  ### Effort
179
193
 
180
- Anthropic introduced an `effort` option with `claude-opus-4-5` that affects thinking, text responses, and function calls. Effort defaults to `high` and you can set it to `medium` or `low` to save tokens and to lower time-to-last-token latency (TTLT).
194
+ Anthropic introduced an `effort` option with `claude-opus-4-5` that affects thinking, text responses, and function calls. Effort defaults to `high` and you can set it to `medium` or `low` to save tokens and to lower time-to-last-token latency (TTLT). `claude-opus-4-7` additionally supports `xhigh` and `max` for maximum reasoning effort.
181
195
 
182
196
  ```ts highlight="8-10"
183
197
  import { anthropic, AnthropicLanguageModelOptions } from '@ai-sdk/anthropic';
@@ -218,6 +232,67 @@ const { text } = await generateText({
218
232
 
219
233
  The `speed` option accepts `'fast'` or `'standard'` (default behavior).
220
234
 
235
+ ### Task Budgets
236
+
237
+ `claude-opus-4-7` supports a `taskBudget` option that informs the model of the total token budget available for an agentic turn. The model uses this information to prioritize work, plan ahead, and wind down gracefully as the budget is consumed.
238
+
239
+ Task budgets are advisory — they do not enforce a hard token limit. The model will attempt to stay within budget, but actual usage may vary.
240
+
241
+ ```ts highlight="8-13"
242
+ import { anthropic, AnthropicLanguageModelOptions } from '@ai-sdk/anthropic';
243
+ import { generateText } from 'ai';
244
+
245
+ const { text } = await generateText({
246
+ model: anthropic('claude-opus-4-7'),
247
+ prompt: 'Research the pros and cons of Rust vs Go for building CLI tools.',
248
+ providerOptions: {
249
+ anthropic: {
250
+ taskBudget: {
251
+ type: 'tokens',
252
+ total: 400000,
253
+ },
254
+ } satisfies AnthropicLanguageModelOptions,
255
+ },
256
+ });
257
+ ```
258
+
259
+ For long-running agents that compact and restart context, you can carry the remaining budget forward using the `remaining` field:
260
+
261
+ ```ts
262
+ taskBudget: {
263
+ type: 'tokens',
264
+ total: 400000,
265
+ remaining: 215000, // budget left after prior compacted-away contexts
266
+ }
267
+ ```
268
+
269
+ The `taskBudget` object accepts:
270
+
271
+ - `type` _"tokens"_ - Budget type. Currently only `"tokens"` is supported.
272
+ - `total` _number_ - Total task budget for the agentic turn. Minimum 20,000.
273
+ - `remaining` _number_ - Budget left after prior compacted-away contexts. Must be between 0 and `total`. Defaults to `total` if omitted.
274
+
275
+ ### Data Residency
276
+
277
+ Anthropic supports an [`inferenceGeo` option](https://platform.claude.com/docs/en/build-with-claude/data-residency) that controls where model inference runs for a request.
278
+
279
+ ```ts highlight="8-10"
280
+ import { anthropic, AnthropicLanguageModelOptions } from '@ai-sdk/anthropic';
281
+ import { generateText } from 'ai';
282
+
283
+ const { text } = await generateText({
284
+ model: anthropic('claude-opus-4-6'),
285
+ prompt: 'Summarize the key points of this document.',
286
+ providerOptions: {
287
+ anthropic: {
288
+ inferenceGeo: 'us',
289
+ } satisfies AnthropicLanguageModelOptions,
290
+ },
291
+ });
292
+ ```
293
+
294
+ The `inferenceGeo` option accepts `'us'` (US-only infrastructure) or `'global'` (default, any available geography).
295
+
221
296
  ### Reasoning
222
297
 
223
298
  Anthropic models support extended thinking, where Claude shows its reasoning process before providing a final answer.
@@ -261,6 +336,31 @@ const { text } = await generateText({
261
336
  });
262
337
  ```
263
338
 
339
+ ##### Thinking Display (Opus 4.7+)
340
+
341
+ Starting with `claude-opus-4-7`, thinking content is omitted from the response by default (`display: 'omitted'`) — thinking blocks are present in the stream but their text is empty. To receive reasoning output, set `display: 'summarized'`:
342
+
343
+ ```ts highlight="5"
344
+ const { text, reasoningText } = await generateText({
345
+ model: anthropic('claude-opus-4-7'),
346
+ providerOptions: {
347
+ anthropic: {
348
+ thinking: { type: 'adaptive', display: 'summarized' },
349
+ } satisfies AnthropicLanguageModelOptions,
350
+ },
351
+ prompt: 'How many people will live in the world in 2040?',
352
+ });
353
+
354
+ console.log(reasoningText); // reasoning text (empty without display: 'summarized')
355
+ console.log(text);
356
+ ```
357
+
358
+ <Note>
359
+ If you stream reasoning to users with `claude-opus-4-7`, the default `"omitted"` display will
360
+ cause a long pause before output begins. Set `display: "summarized"` to restore visible
361
+ progress during thinking.
362
+ </Note>
363
+
264
364
  #### Budget-Based Thinking
265
365
 
266
366
  For earlier models (`claude-opus-4-20250514`, `claude-sonnet-4-20250514`, `claude-sonnet-4-5-20250929`),
@@ -1155,12 +1255,12 @@ import {
1155
1255
  anthropic,
1156
1256
  forwardAnthropicContainerIdFromLastStep,
1157
1257
  } from '@ai-sdk/anthropic';
1158
- import { generateText, tool, stepCountIs } from 'ai';
1258
+ import { generateText, tool, isStepCount } from 'ai';
1159
1259
  import { z } from 'zod';
1160
1260
 
1161
1261
  const result = await generateText({
1162
1262
  model: anthropic('claude-sonnet-4-5'),
1163
- stopWhen: stepCountIs(10),
1263
+ stopWhen: isStepCount(10),
1164
1264
  prompt:
1165
1265
  'Get the weather for Tokyo, Sydney, and London, then calculate the average temperature.',
1166
1266
  tools: {
@@ -1345,6 +1445,7 @@ and the `mediaType` should be set to `'application/pdf'`.
1345
1445
 
1346
1446
  | Model | Image Input | Object Generation | Tool Usage | Computer Use | Web Search | Tool Search | Compaction |
1347
1447
  | ------------------- | ------------------- | ------------------- | ------------------- | ------------------- | ------------------- | ------------------- | ------------------- |
1448
+ | `claude-opus-4-7` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
1348
1449
  | `claude-opus-4-6` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
1349
1450
  | `claude-sonnet-4-6` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | |
1350
1451
  | `claude-opus-4-5` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | |
package/package.json CHANGED
@@ -1,10 +1,10 @@
1
1
  {
2
2
  "name": "@ai-sdk/anthropic",
3
- "version": "4.0.0-beta.3",
3
+ "version": "4.0.0-beta.32",
4
+ "type": "module",
4
5
  "license": "Apache-2.0",
5
6
  "sideEffects": false,
6
7
  "main": "./dist/index.js",
7
- "module": "./dist/index.mjs",
8
8
  "types": "./dist/index.d.ts",
9
9
  "files": [
10
10
  "dist/**/*",
@@ -25,26 +25,25 @@
25
25
  "./package.json": "./package.json",
26
26
  ".": {
27
27
  "types": "./dist/index.d.ts",
28
- "import": "./dist/index.mjs",
29
- "require": "./dist/index.js"
28
+ "import": "./dist/index.js",
29
+ "default": "./dist/index.js"
30
30
  },
31
31
  "./internal": {
32
32
  "types": "./dist/internal/index.d.ts",
33
- "import": "./dist/internal/index.mjs",
34
- "module": "./dist/internal/index.mjs",
35
- "require": "./dist/internal/index.js"
33
+ "import": "./dist/internal/index.js",
34
+ "default": "./dist/internal/index.js"
36
35
  }
37
36
  },
38
37
  "dependencies": {
39
- "@ai-sdk/provider": "4.0.0-beta.0",
40
- "@ai-sdk/provider-utils": "5.0.0-beta.1"
38
+ "@ai-sdk/provider": "4.0.0-beta.12",
39
+ "@ai-sdk/provider-utils": "5.0.0-beta.22"
41
40
  },
42
41
  "devDependencies": {
43
42
  "@types/node": "20.17.24",
44
43
  "tsup": "^8",
45
44
  "typescript": "5.8.3",
46
45
  "zod": "3.25.76",
47
- "@ai-sdk/test-server": "2.0.0-beta.0",
46
+ "@ai-sdk/test-server": "2.0.0-beta.1",
48
47
  "@vercel/ai-tsconfig": "0.0.0"
49
48
  },
50
49
  "peerDependencies": {
@@ -71,9 +70,7 @@
71
70
  "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
72
71
  "build:watch": "pnpm clean && tsup --watch --tsconfig tsconfig.build.json",
73
72
  "clean": "del-cli dist docs *.tsbuildinfo",
74
- "lint": "eslint \"./**/*.ts*\"",
75
73
  "type-check": "tsc --build",
76
- "prettier-check": "prettier --check \"./**/*.ts*\"",
77
74
  "test": "pnpm test:node && pnpm test:edge",
78
75
  "test:update": "pnpm test:node -u",
79
76
  "test:watch": "vitest --config vitest.node.config.js",
@@ -0,0 +1,96 @@
1
+ import {
2
+ FilesV4,
3
+ FilesV4UploadFileCallOptions,
4
+ FilesV4UploadFileResult,
5
+ } from '@ai-sdk/provider';
6
+ import {
7
+ combineHeaders,
8
+ convertBase64ToUint8Array,
9
+ createJsonResponseHandler,
10
+ FetchFunction,
11
+ lazySchema,
12
+ postFormDataToApi,
13
+ zodSchema,
14
+ } from '@ai-sdk/provider-utils';
15
+ import { z } from 'zod/v4';
16
+ import { anthropicFailedResponseHandler } from './anthropic-error';
17
+
18
+ const anthropicUploadFileResponseSchema = lazySchema(() =>
19
+ zodSchema(
20
+ z.object({
21
+ id: z.string(),
22
+ type: z.literal('file'),
23
+ filename: z.string(),
24
+ mime_type: z.string(),
25
+ size_bytes: z.number(),
26
+ created_at: z.string(),
27
+ downloadable: z.boolean().nullish(),
28
+ }),
29
+ ),
30
+ );
31
+
32
+ interface AnthropicFilesConfig {
33
+ provider: string;
34
+ baseURL: string;
35
+ headers: () => Record<string, string | undefined>;
36
+ fetch?: FetchFunction;
37
+ }
38
+
39
+ export class AnthropicFiles implements FilesV4 {
40
+ readonly specificationVersion = 'v4';
41
+
42
+ get provider(): string {
43
+ return this.config.provider;
44
+ }
45
+
46
+ constructor(private readonly config: AnthropicFilesConfig) {}
47
+
48
+ async uploadFile({
49
+ data,
50
+ mediaType,
51
+ filename,
52
+ }: FilesV4UploadFileCallOptions): Promise<FilesV4UploadFileResult> {
53
+ const fileBytes =
54
+ data instanceof Uint8Array ? data : convertBase64ToUint8Array(data);
55
+
56
+ const blob = new Blob([fileBytes], { type: mediaType });
57
+
58
+ const formData = new FormData();
59
+ if (filename != null) {
60
+ formData.append('file', blob, filename);
61
+ } else {
62
+ formData.append('file', blob);
63
+ }
64
+
65
+ const { value: response } = await postFormDataToApi({
66
+ url: `${this.config.baseURL}/files`,
67
+ headers: combineHeaders(this.config.headers(), {
68
+ 'anthropic-beta': 'files-api-2025-04-14',
69
+ }),
70
+ formData,
71
+ failedResponseHandler: anthropicFailedResponseHandler,
72
+ successfulResponseHandler: createJsonResponseHandler(
73
+ anthropicUploadFileResponseSchema,
74
+ ),
75
+ fetch: this.config.fetch,
76
+ });
77
+
78
+ return {
79
+ warnings: [],
80
+ providerReference: { anthropic: response.id },
81
+ mediaType: response.mime_type ?? mediaType,
82
+ filename: response.filename ?? filename,
83
+ providerMetadata: {
84
+ anthropic: {
85
+ filename: response.filename,
86
+ mimeType: response.mime_type,
87
+ sizeBytes: response.size_bytes,
88
+ createdAt: response.created_at,
89
+ ...(response.downloadable != null
90
+ ? { downloadable: response.downloadable }
91
+ : {}),
92
+ },
93
+ },
94
+ };
95
+ }
96
+ }
@@ -87,6 +87,10 @@ type AnthropicContentSource =
87
87
  type: 'text';
88
88
  media_type: 'text/plain';
89
89
  data: string;
90
+ }
91
+ | {
92
+ type: 'file';
93
+ file_id: string;
90
94
  };
91
95
 
92
96
  export interface AnthropicImageContent {