@ai-sdk/anthropic 2.0.60 → 2.0.61
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +75 -1
- package/dist/index.d.ts +75 -1
- package/dist/index.js +282 -24
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +282 -24
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +281 -23
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +281 -23
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
@@ -3,10 +3,34 @@ import { z } from 'zod/v4';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
+/**
+ * Represents a single iteration in the usage breakdown.
+ * When compaction occurs, the API returns an iterations array showing
+ * usage for each sampling iteration (compaction + message).
+ */
+interface AnthropicUsageIteration {
+    type: 'compaction' | 'message';
+    /**
+     * Number of input tokens consumed in this iteration.
+     */
+    inputTokens: number;
+    /**
+     * Number of output tokens generated in this iteration.
+     */
+    outputTokens: number;
+}
 interface AnthropicMessageMetadata {
     usage: JSONObject;
     cacheCreationInputTokens: number | null;
     stopSequence: string | null;
+    /**
+     * Usage breakdown by iteration when compaction is triggered.
+     *
+     * When compaction occurs, this array contains usage for each sampling iteration.
+     * The first iteration is typically the compaction step, followed by the main
+     * message iteration.
+     */
+    iterations: AnthropicUsageIteration[] | null;
     /**
      * Information about the container used in this request.
      *
@@ -40,6 +64,35 @@ interface AnthropicMessageMetadata {
             version: string;
         }> | null;
     } | null;
+    /**
+     * Information about context management operations applied to this request.
+     */
+    contextManagement: {
+        /**
+         * List of context management edits that were applied.
+         */
+        appliedEdits: Array<{
+            /**
+             * The type of context management edit applied.
+             * Possible value: 'clear_01'
+             */
+            type: 'clear_01';
+            /**
+             * The number of input tokens that were cleared.
+             */
+            clearedInputTokens: number;
+        }
+        /**
+         * Represents a compaction edit where the conversation context was summarized.
+         */
+        | {
+            /**
+             * The type of context management edit applied.
+             * Possible value: 'compact_20260112'
+             */
+            type: 'compact_20260112';
+        }>;
+    } | null;
 }
 
 type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-opus-4-6' | (string & {});
@@ -78,6 +131,27 @@ declare const anthropicProviderOptions: z.ZodObject<{
         max: "max";
     }>>;
     speed: z.ZodOptional<z.ZodLiteral<"fast">>;
+    contextManagement: z.ZodOptional<z.ZodObject<{
+        edits: z.ZodArray<z.ZodDiscriminatedUnion<[z.ZodObject<{
+            type: z.ZodLiteral<"clear_01">;
+            trigger: z.ZodOptional<z.ZodObject<{
+                type: z.ZodLiteral<"input_tokens">;
+                value: z.ZodNumber;
+            }, z.core.$strip>>;
+            keep: z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"all">, z.ZodObject<{
+                type: z.ZodLiteral<"thinking_turns">;
+                value: z.ZodNumber;
+            }, z.core.$strip>]>>;
+        }, z.core.$strip>, z.ZodObject<{
+            type: z.ZodLiteral<"compact_20260112">;
+            trigger: z.ZodOptional<z.ZodObject<{
+                type: z.ZodLiteral<"input_tokens">;
+                value: z.ZodNumber;
+            }, z.core.$strip>>;
+            pauseAfterCompaction: z.ZodOptional<z.ZodBoolean>;
+            instructions: z.ZodOptional<z.ZodString>;
+        }, z.core.$strip>]>>;
+    }, z.core.$strip>>;
 }, z.core.$strip>;
 type AnthropicProviderOptions = z.infer<typeof anthropicProviderOptions>;
 
@@ -572,4 +646,4 @@ declare const anthropic: AnthropicProvider;
 
 declare const VERSION: string;
 
-export { type AnthropicMessageMetadata, type AnthropicProvider, type AnthropicProviderOptions, type AnthropicProviderSettings, VERSION, anthropic, createAnthropic };
+export { type AnthropicMessageMetadata, type AnthropicProvider, type AnthropicProviderOptions, type AnthropicProviderSettings, type AnthropicUsageIteration, VERSION, anthropic, createAnthropic };
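The declaration changes above add a contextManagement provider option and expose per-iteration usage plus applied context edits through the message metadata. A minimal sketch of how that surface might be exercised from the AI SDK, assuming the usual generateText call shape from the 'ai' package; the prompt and trigger value are illustrative, not defaults:

import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const result = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  prompt: 'Continue the long-running conversation.',
  providerOptions: {
    anthropic: {
      // Compact the conversation once it crosses the (illustrative) input-token trigger.
      contextManagement: {
        edits: [
          {
            type: 'compact_20260112',
            trigger: { type: 'input_tokens', value: 100_000 },
            pauseAfterCompaction: false,
          },
        ],
      },
    },
  },
});

// The provider metadata carries the per-iteration usage breakdown and the
// applied context management edits described by AnthropicMessageMetadata.
console.log(result.providerMetadata?.anthropic);

When compaction fires, the logged metadata should include an iterations array and a contextManagement.appliedEdits entry of type 'compact_20260112', per the metadata shape above.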
package/dist/index.d.ts
CHANGED
@@ -3,10 +3,34 @@ import { z } from 'zod/v4';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
+/**
+ * Represents a single iteration in the usage breakdown.
+ * When compaction occurs, the API returns an iterations array showing
+ * usage for each sampling iteration (compaction + message).
+ */
+interface AnthropicUsageIteration {
+    type: 'compaction' | 'message';
+    /**
+     * Number of input tokens consumed in this iteration.
+     */
+    inputTokens: number;
+    /**
+     * Number of output tokens generated in this iteration.
+     */
+    outputTokens: number;
+}
 interface AnthropicMessageMetadata {
     usage: JSONObject;
     cacheCreationInputTokens: number | null;
     stopSequence: string | null;
+    /**
+     * Usage breakdown by iteration when compaction is triggered.
+     *
+     * When compaction occurs, this array contains usage for each sampling iteration.
+     * The first iteration is typically the compaction step, followed by the main
+     * message iteration.
+     */
+    iterations: AnthropicUsageIteration[] | null;
     /**
      * Information about the container used in this request.
      *
@@ -40,6 +64,35 @@ interface AnthropicMessageMetadata {
             version: string;
         }> | null;
     } | null;
+    /**
+     * Information about context management operations applied to this request.
+     */
+    contextManagement: {
+        /**
+         * List of context management edits that were applied.
+         */
+        appliedEdits: Array<{
+            /**
+             * The type of context management edit applied.
+             * Possible value: 'clear_01'
+             */
+            type: 'clear_01';
+            /**
+             * The number of input tokens that were cleared.
+             */
+            clearedInputTokens: number;
+        }
+        /**
+         * Represents a compaction edit where the conversation context was summarized.
+         */
+        | {
+            /**
+             * The type of context management edit applied.
+             * Possible value: 'compact_20260112'
+             */
+            type: 'compact_20260112';
+        }>;
+    } | null;
 }
 
 type AnthropicMessagesModelId = 'claude-3-5-haiku-20241022' | 'claude-3-5-haiku-latest' | 'claude-3-7-sonnet-20250219' | 'claude-3-7-sonnet-latest' | 'claude-3-haiku-20240307' | 'claude-haiku-4-5-20251001' | 'claude-haiku-4-5' | 'claude-opus-4-0' | 'claude-opus-4-1-20250805' | 'claude-opus-4-1' | 'claude-opus-4-20250514' | 'claude-opus-4-5' | 'claude-opus-4-5-20251101' | 'claude-sonnet-4-0' | 'claude-sonnet-4-20250514' | 'claude-sonnet-4-5-20250929' | 'claude-sonnet-4-5' | 'claude-opus-4-6' | (string & {});
@@ -78,6 +131,27 @@ declare const anthropicProviderOptions: z.ZodObject<{
         max: "max";
     }>>;
     speed: z.ZodOptional<z.ZodLiteral<"fast">>;
+    contextManagement: z.ZodOptional<z.ZodObject<{
+        edits: z.ZodArray<z.ZodDiscriminatedUnion<[z.ZodObject<{
+            type: z.ZodLiteral<"clear_01">;
+            trigger: z.ZodOptional<z.ZodObject<{
+                type: z.ZodLiteral<"input_tokens">;
+                value: z.ZodNumber;
+            }, z.core.$strip>>;
+            keep: z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"all">, z.ZodObject<{
+                type: z.ZodLiteral<"thinking_turns">;
+                value: z.ZodNumber;
+            }, z.core.$strip>]>>;
+        }, z.core.$strip>, z.ZodObject<{
+            type: z.ZodLiteral<"compact_20260112">;
+            trigger: z.ZodOptional<z.ZodObject<{
+                type: z.ZodLiteral<"input_tokens">;
+                value: z.ZodNumber;
+            }, z.core.$strip>>;
+            pauseAfterCompaction: z.ZodOptional<z.ZodBoolean>;
+            instructions: z.ZodOptional<z.ZodString>;
+        }, z.core.$strip>]>>;
+    }, z.core.$strip>>;
 }, z.core.$strip>;
 type AnthropicProviderOptions = z.infer<typeof anthropicProviderOptions>;
 
@@ -572,4 +646,4 @@ declare const anthropic: AnthropicProvider;
 
 declare const VERSION: string;
 
-export { type AnthropicMessageMetadata, type AnthropicProvider, type AnthropicProviderOptions, type AnthropicProviderSettings, VERSION, anthropic, createAnthropic };
+export { type AnthropicMessageMetadata, type AnthropicProvider, type AnthropicProviderOptions, type AnthropicProviderSettings, type AnthropicUsageIteration, VERSION, anthropic, createAnthropic };
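The option schema repeated here for index.d.ts also admits the clear_01 edit. A sketch of that variant under the same assumptions (all values illustrative); per the option docs it clears older tool-use and thinking content at the trigger, optionally keeping the most recent thinking turns:

import type { AnthropicProviderOptions } from '@ai-sdk/anthropic';

// clear_01: clear tool uses / thinking blocks once the input-token trigger is hit,
// keeping the two most recent thinking turns (illustrative values).
const clearingOptions: AnthropicProviderOptions = {
  contextManagement: {
    edits: [
      {
        type: 'clear_01',
        trigger: { type: 'input_tokens', value: 50_000 },
        keep: { type: 'thinking_turns', value: 2 },
      },
    ],
  },
};

// Passed as providerOptions: { anthropic: clearingOptions } on a generateText call.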
package/dist/index.js
CHANGED
@@ -31,7 +31,7 @@ var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils20 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "2.0.60" : "0.0.0-test";
+var VERSION = true ? "2.0.61" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -107,6 +107,10 @@ var anthropicMessagesResponseSchema = (0, import_provider_utils2.lazySchema)(
         type: import_v42.z.literal("redacted_thinking"),
         data: import_v42.z.string()
       }),
+      import_v42.z.object({
+        type: import_v42.z.literal("compaction"),
+        content: import_v42.z.string()
+      }),
       import_v42.z.object({
         type: import_v42.z.literal("tool_use"),
         id: import_v42.z.string(),
@@ -251,8 +255,28 @@ var anthropicMessagesResponseSchema = (0, import_provider_utils2.lazySchema)(
       input_tokens: import_v42.z.number(),
       output_tokens: import_v42.z.number(),
       cache_creation_input_tokens: import_v42.z.number().nullish(),
-      cache_read_input_tokens: import_v42.z.number().nullish()
+      cache_read_input_tokens: import_v42.z.number().nullish(),
+      iterations: import_v42.z.array(
+        import_v42.z.object({
+          type: import_v42.z.union([import_v42.z.literal("compaction"), import_v42.z.literal("message")]),
+          input_tokens: import_v42.z.number(),
+          output_tokens: import_v42.z.number()
+        })
+      ).nullish()
     }),
+    context_management: import_v42.z.object({
+      applied_edits: import_v42.z.array(
+        import_v42.z.union([
+          import_v42.z.object({
+            type: import_v42.z.literal("clear_01"),
+            cleared_input_tokens: import_v42.z.number()
+          }),
+          import_v42.z.object({
+            type: import_v42.z.literal("compact_20260112")
+          })
+        ])
+      )
+    }).nullish(),
     container: import_v42.z.object({
       expires_at: import_v42.z.string(),
       id: import_v42.z.string(),
@@ -303,6 +327,10 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
           type: import_v42.z.literal("redacted_thinking"),
           data: import_v42.z.string()
         }),
+        import_v42.z.object({
+          type: import_v42.z.literal("compaction"),
+          content: import_v42.z.string().nullish()
+        }),
         import_v42.z.object({
           type: import_v42.z.literal("server_tool_use"),
           id: import_v42.z.string(),
@@ -455,6 +483,10 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
           type: import_v42.z.literal("signature_delta"),
           signature: import_v42.z.string()
         }),
+        import_v42.z.object({
+          type: import_v42.z.literal("compaction_delta"),
+          content: import_v42.z.string()
+        }),
         import_v42.z.object({
           type: import_v42.z.literal("citations_delta"),
           citation: import_v42.z.discriminatedUnion("type", [
@@ -520,8 +552,28 @@ var anthropicMessagesChunkSchema = (0, import_provider_utils2.lazySchema)(
         input_tokens: import_v42.z.number().nullish(),
         output_tokens: import_v42.z.number(),
         cache_creation_input_tokens: import_v42.z.number().nullish(),
-        cache_read_input_tokens: import_v42.z.number().nullish()
-      })
+        cache_read_input_tokens: import_v42.z.number().nullish(),
+        iterations: import_v42.z.array(
+          import_v42.z.object({
+            type: import_v42.z.union([import_v42.z.literal("compaction"), import_v42.z.literal("message")]),
+            input_tokens: import_v42.z.number(),
+            output_tokens: import_v42.z.number()
+          })
+        ).nullish()
+      }),
+      context_management: import_v42.z.object({
+        applied_edits: import_v42.z.array(
+          import_v42.z.union([
+            import_v42.z.object({
+              type: import_v42.z.literal("clear_01"),
+              cleared_input_tokens: import_v42.z.number()
+            }),
+            import_v42.z.object({
+              type: import_v42.z.literal("compact_20260112")
+            })
+          ])
+        )
+      }).nullish()
     }),
     import_v42.z.object({
       type: import_v42.z.literal("message_stop")
@@ -632,7 +684,40 @@ var anthropicProviderOptions = import_v43.z.object({
   * Enable fast mode for faster inference (2.5x faster output token speeds).
   * Only supported with claude-opus-4-6.
   */
-  speed: import_v43.z.literal("fast").optional()
+  speed: import_v43.z.literal("fast").optional(),
+  /**
+   * Context management configuration for automatic context window management.
+   * Enables features like automatic compaction and clearing of tool uses/thinking blocks.
+   */
+  contextManagement: import_v43.z.object({
+    edits: import_v43.z.array(
+      import_v43.z.discriminatedUnion("type", [
+        import_v43.z.object({
+          type: import_v43.z.literal("clear_01"),
+          trigger: import_v43.z.object({
+            type: import_v43.z.literal("input_tokens"),
+            value: import_v43.z.number()
+          }).optional(),
+          keep: import_v43.z.union([
+            import_v43.z.literal("all"),
+            import_v43.z.object({
+              type: import_v43.z.literal("thinking_turns"),
+              value: import_v43.z.number()
+            })
+          ]).optional()
+        }),
+        import_v43.z.object({
+          type: import_v43.z.literal("compact_20260112"),
+          trigger: import_v43.z.object({
+            type: import_v43.z.literal("input_tokens"),
+            value: import_v43.z.number()
+          }).optional(),
+          pauseAfterCompaction: import_v43.z.boolean().optional(),
+          instructions: import_v43.z.string().optional()
+        })
+      ])
+    )
+  }).optional()
 });
 
 // src/anthropic-prepare-tools.ts
@@ -1204,7 +1289,7 @@ async function convertToAnthropicMessagesPrompt({
   warnings,
   cacheControlValidator
 }) {
-  var _a, _b, _c, _d, _e;
+  var _a, _b, _c, _d, _e, _f;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   const validator = cacheControlValidator || new CacheControlValidator();
@@ -1448,16 +1533,25 @@ async function convertToAnthropicMessagesPrompt({
           }) : void 0;
           switch (part.type) {
             case "text": {
-              anthropicContent.push({
-                type: "text",
-                text: (
-                  // trim the last text part if it's the last message in the block
-                  // because Anthropic does not allow trailing whitespace
-                  // in pre-filled assistant responses
-                  isLastBlock && isLastMessage && isLastContentPart ? part.text.trim() : part.text
-                ),
-                cache_control: cacheControl
-              });
+              const textMetadata = (_f = part.providerOptions) == null ? void 0 : _f.anthropic;
+              if ((textMetadata == null ? void 0 : textMetadata.type) === "compaction") {
+                anthropicContent.push({
+                  type: "compaction",
+                  content: part.text,
+                  cache_control: cacheControl
+                });
+              } else {
+                anthropicContent.push({
+                  type: "text",
+                  text: (
+                    // trim the last text part if it's the last message in the block
+                    // because Anthropic does not allow trailing whitespace
+                    // in pre-filled assistant responses
+                    isLastBlock && isLastMessage && isLastContentPart ? part.text.trim() : part.text
+                  ),
+                  cache_control: cacheControl
+                });
+              }
              break;
             }
             case "reasoning": {
@@ -1746,6 +1840,8 @@ function mapAnthropicStopReason({
       return isJsonResponseFromTool ? "stop" : "tool-calls";
     case "max_tokens":
       return "length";
+    case "compaction":
+      return "other";
     default:
       return "unknown";
   }
@@ -1934,6 +2030,48 @@ var AnthropicMessagesLanguageModel = class {
           }))
        }
      },
+      // context management:
+      ...(anthropicOptions == null ? void 0 : anthropicOptions.contextManagement) && {
+        context_management: {
+          edits: anthropicOptions.contextManagement.edits.map((edit) => {
+            const convertTrigger = (trigger) => trigger ? {
+              type: trigger.type,
+              value: trigger.value
+            } : void 0;
+            switch (edit.type) {
+              case "clear_01":
+                return {
+                  type: edit.type,
+                  ...edit.trigger !== void 0 && {
+                    trigger: convertTrigger(edit.trigger)
+                  },
+                  ...edit.keep !== void 0 && {
+                    keep: edit.keep
+                  }
+                };
+              case "compact_20260112":
+                return {
+                  type: edit.type,
+                  ...edit.trigger !== void 0 && {
+                    trigger: convertTrigger(edit.trigger)
+                  },
+                  ...edit.pauseAfterCompaction !== void 0 && {
+                    pause_after_compaction: edit.pauseAfterCompaction
+                  },
+                  ...edit.instructions !== void 0 && {
+                    instructions: edit.instructions
+                  }
+                };
+              default:
+                warnings.push({
+                  type: "other",
+                  message: `Unknown context management edit type: ${edit.type}`
+                });
+                return edit;
+            }
+          })
+        }
+      },
       // prompt:
       system: messagesPrompt.system,
       messages: messagesPrompt.messages
@@ -2002,6 +2140,13 @@ var AnthropicMessagesLanguageModel = class {
     if (useStructuredOutput) {
       betas.add("structured-outputs-2025-11-13");
     }
+    const contextManagement = anthropicOptions == null ? void 0 : anthropicOptions.contextManagement;
+    if (contextManagement) {
+      betas.add("context-management-2025-06-27");
+      if (contextManagement.edits.some((e) => e.type === "compact_20260112")) {
+        betas.add("compact-2026-01-12");
+      }
+    }
     const {
       tools: anthropicTools2,
       toolChoice: anthropicToolChoice,
@@ -2152,6 +2297,18 @@ var AnthropicMessagesLanguageModel = class {
          });
          break;
        }
+        case "compaction": {
+          content.push({
+            type: "text",
+            text: part.content,
+            providerMetadata: {
+              anthropic: {
+                type: "compaction"
+              }
+            }
+          });
+          break;
+        }
        case "tool_use": {
          content.push(
            // when a json response tool is used, the tool call becomes the text:
@@ -2316,6 +2473,22 @@ var AnthropicMessagesLanguageModel = class {
        }
      }
    }
+    let inputTokens;
+    let outputTokens;
+    if (response.usage.iterations && response.usage.iterations.length > 0) {
+      const totals = response.usage.iterations.reduce(
+        (acc, iter) => ({
+          input: acc.input + iter.input_tokens,
+          output: acc.output + iter.output_tokens
+        }),
+        { input: 0, output: 0 }
+      );
+      inputTokens = totals.input;
+      outputTokens = totals.output;
+    } else {
+      inputTokens = response.usage.input_tokens;
+      outputTokens = response.usage.output_tokens;
+    }
    return {
      content,
      finishReason: mapAnthropicStopReason({
@@ -2323,9 +2496,9 @@ var AnthropicMessagesLanguageModel = class {
        isJsonResponseFromTool: usesJsonResponseTool
      }),
      usage: {
-        inputTokens: response.usage.input_tokens,
-        outputTokens: response.usage.output_tokens,
-        totalTokens: response.usage.input_tokens + response.usage.output_tokens,
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
        cachedInputTokens: (_b = response.usage.cache_read_input_tokens) != null ? _b : void 0
      },
      request: { body: args },
@@ -2341,6 +2514,11 @@ var AnthropicMessagesLanguageModel = class {
          usage: response.usage,
          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
          stopSequence: (_f = response.stop_sequence) != null ? _f : null,
+          iterations: response.usage.iterations ? response.usage.iterations.map((iter) => ({
+            type: iter.type,
+            inputTokens: iter.input_tokens,
+            outputTokens: iter.output_tokens
+          })) : null,
          container: response.container ? {
            expiresAt: response.container.expires_at,
            id: response.container.id,
@@ -2349,6 +2527,23 @@ var AnthropicMessagesLanguageModel = class {
              skillId: skill.skill_id,
              version: skill.version
            }))) != null ? _h : null
+          } : null,
+          contextManagement: response.context_management ? {
+            appliedEdits: response.context_management.applied_edits.map(
+              (edit) => {
+                switch (edit.type) {
+                  case "clear_01":
+                    return {
+                      type: edit.type,
+                      clearedInputTokens: edit.cleared_input_tokens
+                    };
+                  case "compact_20260112":
+                    return {
+                      type: edit.type
+                    };
+                }
+              }
+            )
          } : null
        }
      }
@@ -2385,6 +2580,8 @@ var AnthropicMessagesLanguageModel = class {
    let cacheCreationInputTokens = null;
    let stopSequence = null;
    let container = null;
+    let iterations = null;
+    let contextManagement = null;
    let blockType = void 0;
    const generateId3 = this.generateId;
    const transformedStream = response.pipeThrough(
@@ -2439,6 +2636,19 @@ var AnthropicMessagesLanguageModel = class {
              });
              return;
            }
+            case "compaction": {
+              contentBlocks[value.index] = { type: "text" };
+              controller.enqueue({
+                type: "text-start",
+                id: String(value.index),
+                providerMetadata: {
+                  anthropic: {
+                    type: "compaction"
+                  }
+                }
+              });
+              return;
+            }
            case "tool_use": {
              contentBlocks[value.index] = usesJsonResponseTool ? { type: "text" } : {
                type: "tool-call",
@@ -2701,6 +2911,14 @@ var AnthropicMessagesLanguageModel = class {
              }
              return;
            }
+            case "compaction_delta": {
+              controller.enqueue({
+                type: "text-delta",
+                id: String(value.index),
+                delta: value.delta.content
+              });
+              return;
+            }
            case "input_json_delta": {
              const contentBlock = contentBlocks[value.index];
              let delta = value.delta.partial_json;
@@ -2768,16 +2986,31 @@ var AnthropicMessagesLanguageModel = class {
              return;
            }
            case "message_delta": {
-              if (value.usage.input_tokens != null && usage.inputTokens !== value.usage.input_tokens) {
-                usage.inputTokens = value.usage.input_tokens;
-              }
-              usage.outputTokens = value.usage.output_tokens;
              if (value.usage.cache_read_input_tokens != null) {
                usage.cachedInputTokens = value.usage.cache_read_input_tokens;
              }
              if (value.usage.cache_creation_input_tokens != null) {
                cacheCreationInputTokens = value.usage.cache_creation_input_tokens;
              }
+              if (value.usage.iterations != null) {
+                iterations = value.usage.iterations;
+              }
+              if (value.usage.iterations && value.usage.iterations.length > 0) {
+                const totals = value.usage.iterations.reduce(
+                  (acc, iter) => ({
+                    input: acc.input + iter.input_tokens,
+                    output: acc.output + iter.output_tokens
+                  }),
+                  { input: 0, output: 0 }
+                );
+                usage.inputTokens = totals.input;
+                usage.outputTokens = totals.output;
+              } else {
+                if (value.usage.input_tokens != null && usage.inputTokens !== value.usage.input_tokens) {
+                  usage.inputTokens = value.usage.input_tokens;
+                }
+                usage.outputTokens = value.usage.output_tokens;
+              }
              usage.totalTokens = ((_f = usage.inputTokens) != null ? _f : 0) + ((_g = usage.outputTokens) != null ? _g : 0);
              finishReason = mapAnthropicStopReason({
                finishReason: value.delta.stop_reason,
@@ -2793,6 +3026,25 @@ var AnthropicMessagesLanguageModel = class {
                  version: skill.version
                }))) != null ? _j : null
              } : null;
+              if (value.context_management != null) {
+                contextManagement = {
+                  appliedEdits: value.context_management.applied_edits.map(
+                    (edit) => {
+                      switch (edit.type) {
+                        case "clear_01":
+                          return {
+                            type: edit.type,
+                            clearedInputTokens: edit.cleared_input_tokens
+                          };
+                        case "compact_20260112":
+                          return {
+                            type: edit.type
+                          };
+                      }
+                    }
+                  )
+                };
+              }
              rawUsage = {
                ...rawUsage,
                ...value.usage
@@ -2809,7 +3061,13 @@ var AnthropicMessagesLanguageModel = class {
                  usage: rawUsage != null ? rawUsage : null,
                  cacheCreationInputTokens,
                  stopSequence,
-                  container
+                  iterations: iterations ? iterations.map((iter) => ({
+                    type: iter.type,
+                    inputTokens: iter.input_tokens,
+                    outputTokens: iter.output_tokens
+                  })) : null,
+                  container,
+                  contextManagement
                }
              }
            });
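In both the doGenerate and doStream paths above, token totals are derived by summing the per-iteration counts whenever the response carries a usage.iterations array, falling back to the top-level counters otherwise. A standalone sketch of that aggregation; the RawUsage type below is an illustrative reduction of the raw usage payload, not an export of this package:

// Minimal shape of the raw Anthropic usage payload used by the aggregation.
interface RawUsage {
  input_tokens: number;
  output_tokens: number;
  iterations?: Array<{
    type: 'compaction' | 'message';
    input_tokens: number;
    output_tokens: number;
  }> | null;
}

// Mirrors the dist code: sum across iterations when compaction produced
// multiple sampling passes, otherwise use the top-level counters.
function aggregateUsage(usage: RawUsage): {
  inputTokens: number;
  outputTokens: number;
  totalTokens: number;
} {
  let inputTokens = usage.input_tokens;
  let outputTokens = usage.output_tokens;
  if (usage.iterations && usage.iterations.length > 0) {
    const totals = usage.iterations.reduce(
      (acc, iter) => ({
        input: acc.input + iter.input_tokens,
        output: acc.output + iter.output_tokens,
      }),
      { input: 0, output: 0 },
    );
    inputTokens = totals.input;
    outputTokens = totals.output;
  }
  return { inputTokens, outputTokens, totalTokens: inputTokens + outputTokens };
}

// Example: a compaction pass followed by the main message pass.
aggregateUsage({
  input_tokens: 0,
  output_tokens: 0,
  iterations: [
    { type: 'compaction', input_tokens: 95_000, output_tokens: 1_200 },
    { type: 'message', input_tokens: 4_500, output_tokens: 800 },
  ],
}); // => { inputTokens: 99500, outputTokens: 2000, totalTokens: 101500 }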