noumen 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +95 -16
- package/dist/a2a/index.d.ts +5 -5
- package/dist/a2a/index.js +3 -3
- package/dist/a2a/index.js.map +1 -1
- package/dist/acp/index.d.ts +5 -5
- package/dist/acp/index.js +4 -4
- package/dist/acp/index.js.map +1 -1
- package/dist/{agent-BrkbZyOT.d.ts → agent-1nFVUP9E.d.ts} +319 -15
- package/dist/{cache-DVqaCX8v.d.ts → cache-DsRqxx6v.d.ts} +1 -1
- package/dist/{chunk-BGG2E6JD.js → chunk-3HEYCV26.js} +1 -1
- package/dist/chunk-3SK5GCI6.js +75 -0
- package/dist/chunk-3SK5GCI6.js.map +1 -0
- package/dist/{chunk-NBDFQYUZ.js → chunk-4HW6LN6D.js} +4784 -2411
- package/dist/chunk-4HW6LN6D.js.map +1 -0
- package/dist/{chunk-7ZMN7XJE.js → chunk-5JN4SPI7.js} +6 -6
- package/dist/chunk-5JN4SPI7.js.map +1 -0
- package/dist/{chunk-CPFHEPW4.js → chunk-CS6WNDCF.js} +73 -41
- package/dist/chunk-CS6WNDCF.js.map +1 -0
- package/dist/chunk-EKOGVTBT.js +472 -0
- package/dist/chunk-EKOGVTBT.js.map +1 -0
- package/dist/{chunk-KY6ZPWHO.js → chunk-HEQQQGK5.js} +47 -28
- package/dist/chunk-HEQQQGK5.js.map +1 -0
- package/dist/{chunk-QTJ7VTJY.js → chunk-HL6JCRZJ.js} +1599 -481
- package/dist/chunk-HL6JCRZJ.js.map +1 -0
- package/dist/chunk-L3L3FG5T.js +16 -0
- package/dist/chunk-L3L3FG5T.js.map +1 -0
- package/dist/cli/index.js +36 -30
- package/dist/cli/index.js.map +1 -1
- package/dist/client/index.d.ts +2 -2
- package/dist/{headless-Q7XHHZIW.js → headless-FFU2DESQ.js} +3 -4
- package/dist/headless-FFU2DESQ.js.map +1 -0
- package/dist/index.d.ts +218 -68
- package/dist/index.js +37 -23
- package/dist/lsp/index.d.ts +4 -4
- package/dist/mcp/index.d.ts +5 -5
- package/dist/mcp/index.js +2 -1
- package/dist/mcp/index.js.map +1 -1
- package/dist/{provider-factory-34MSWJZ3.js → provider-factory-KCLIF34X.js} +2 -2
- package/dist/providers/anthropic.d.ts +2 -2
- package/dist/providers/anthropic.js +5 -3
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/bedrock.d.ts +2 -2
- package/dist/providers/bedrock.js +5 -3
- package/dist/providers/bedrock.js.map +1 -1
- package/dist/providers/gemini.d.ts +2 -1
- package/dist/providers/gemini.js +133 -95
- package/dist/providers/gemini.js.map +1 -1
- package/dist/providers/ollama.d.ts +13 -0
- package/dist/{ollama-YNXAYP3R.js → providers/ollama.js} +6 -4
- package/dist/providers/ollama.js.map +1 -0
- package/dist/providers/openai.d.ts +4 -1
- package/dist/providers/openai.js +2 -1
- package/dist/providers/openrouter.d.ts +1 -1
- package/dist/providers/openrouter.js +2 -1
- package/dist/providers/openrouter.js.map +1 -1
- package/dist/providers/vertex.d.ts +4 -2
- package/dist/providers/vertex.js +6 -3
- package/dist/providers/vertex.js.map +1 -1
- package/dist/{resolve-XM52G7YE.js → resolve-4JA2BBDA.js} +2 -2
- package/dist/server/index.d.ts +35 -20
- package/dist/server/index.js +276 -207
- package/dist/server/index.js.map +1 -1
- package/dist/{server-Cg1yWGaV.d.ts → server-CHMxuWKq.d.ts} +1 -1
- package/dist/{types-DwdzmXfs.d.ts → types-CD0rUKKT.d.ts} +2 -0
- package/dist/{types-3c88cRKH.d.ts → types-LrU4LRmX.d.ts} +28 -0
- package/dist/{types-CwKKucOF.d.ts → types-RPKUTu1k.d.ts} +27 -2
- package/dist/uuid-RVN2T26F.js +8 -0
- package/dist/uuid-RVN2T26F.js.map +1 -0
- package/dist/zod-7YXKWYMC.js +12 -0
- package/dist/zod-7YXKWYMC.js.map +1 -0
- package/package.json +19 -13
- package/dist/chunk-2ZTGQLYK.js +0 -356
- package/dist/chunk-2ZTGQLYK.js.map +0 -1
- package/dist/chunk-7ZMN7XJE.js.map +0 -1
- package/dist/chunk-CPFHEPW4.js.map +0 -1
- package/dist/chunk-KY6ZPWHO.js.map +0 -1
- package/dist/chunk-NBDFQYUZ.js.map +0 -1
- package/dist/chunk-QTJ7VTJY.js.map +0 -1
- package/dist/headless-Q7XHHZIW.js.map +0 -1
- package/dist/ollama-YNXAYP3R.js.map +0 -1
- /package/dist/{chunk-BGG2E6JD.js.map → chunk-3HEYCV26.js.map} +0 -0
- /package/dist/{provider-factory-34MSWJZ3.js.map → provider-factory-KCLIF34X.js.map} +0 -0
- /package/dist/{resolve-XM52G7YE.js.map → resolve-4JA2BBDA.js.map} +0 -0
|
@@ -0,0 +1,472 @@
|
|
|
1
|
+
import {
|
|
2
|
+
getMaxOutputTokensForModel,
|
|
3
|
+
getMessageCacheBreakpointIndex
|
|
4
|
+
} from "./chunk-HEQQQGK5.js";
|
|
5
|
+
import {
|
|
6
|
+
ChatStreamError
|
|
7
|
+
} from "./chunk-L3L3FG5T.js";
|
|
8
|
+
|
|
9
|
+
// src/providers/anthropic-shared.ts
|
|
10
|
+
/**
 * Build an Anthropic `cache_control` block.
 *
 * Always `type: "ephemeral"`; `ttl` and `scope` are copied over only when
 * set (truthy) on the config.
 *
 * @param {object|undefined} config - Cache config with optional `ttl`/`scope`.
 * @returns {object} The cache_control block to attach to a content block.
 */
function buildCacheControlBlock(config) {
  const { ttl, scope } = config ?? {};
  const block = { type: "ephemeral" };
  if (ttl) block.ttl = ttl;
  if (scope) block.scope = scope;
  return block;
}
|
|
16
|
+
/**
 * Whether prompt caching is explicitly turned on.
 *
 * @param {object|undefined} config - Cache config; may be absent.
 * @returns {boolean} True only when `config.enabled` is strictly `true`.
 */
function isCachingEnabled(config) {
  if (!config) return false;
  return config.enabled === true;
}
|
|
19
|
+
/**
 * Map neutral content parts to Anthropic content blocks.
 *
 * - `text` parts become `{ type: "text", text }`.
 * - `image` parts become base64 image sources.
 * - Any other part type is treated as a URL image source.
 *
 * @param {Array<object>} parts - Content parts (`type` of "text"/"image"/other).
 * @returns {Array<object>} Anthropic-shaped content blocks, one per part.
 */
function contentPartsToAnthropic(parts) {
  const toBlock = (part) => {
    switch (part.type) {
      case "text":
        return { type: "text", text: part.text };
      case "image":
        return {
          type: "image",
          source: {
            type: "base64",
            media_type: part.media_type,
            data: part.data
          }
        };
      default:
        // Fallback: anything else carries a `url` and is sent as a URL image.
        return { type: "image", source: { type: "url", url: part.url } };
    }
  };
  return parts.map(toBlock);
}
|
|
40
|
+
/**
 * Convert OpenAI-style tool definitions to Anthropic tool specs.
 *
 * When caching is enabled, the final tool gets a `cache_control` block so the
 * whole tool list is covered by one cache breakpoint.
 *
 * @param {object} params - Chat params; `params.tools` may be absent.
 * @param {object} [cacheConfig] - Optional prompt-cache configuration.
 * @returns {Array<object>|undefined} Anthropic tools, or undefined when none.
 */
function buildAnthropicTools(params, cacheConfig) {
  const sourceTools = params.tools;
  if (!sourceTools) return void 0;
  const tools = sourceTools.map((tool) => {
    const fn = tool.function;
    return {
      name: fn.name,
      description: fn.description,
      input_schema: fn.parameters
    };
  });
  if (tools.length > 0 && isCachingEnabled(cacheConfig)) {
    tools[tools.length - 1].cache_control = buildCacheControlBlock(cacheConfig);
  }
  return tools;
}
|
|
53
|
+
/**
 * Produce the `system` value for an Anthropic request.
 *
 * - No prompt → undefined.
 * - Caching off → the raw string (Anthropic accepts a plain string).
 * - Caching on → a single text block carrying `cache_control`.
 *
 * @param {string|undefined} systemPrompt - The system prompt, if any.
 * @param {object} [cacheConfig] - Optional prompt-cache configuration.
 * @returns {string|Array<object>|undefined} System value in Anthropic shape.
 */
function buildAnthropicSystemBlocks(systemPrompt, cacheConfig) {
  if (!systemPrompt) return void 0;
  if (!isCachingEnabled(cacheConfig)) return systemPrompt;
  const cachedBlock = {
    type: "text",
    text: systemPrompt,
    cache_control: buildCacheControlBlock(cacheConfig)
  };
  return [cachedBlock];
}
|
|
64
|
+
/**
 * Convert a neutral chat transcript into Anthropic Messages API shape.
 *
 * - System messages in the list are skipped; the system prompt is returned
 *   separately via buildAnthropicSystemBlocks().
 * - At most one message (the cache breakpoint chosen by
 *   getMessageCacheBreakpointIndex) gets a `cache_control` marker when
 *   caching is enabled.
 * - Assistant thinking / redacted-thinking / text / tool_use are emitted as
 *   distinct content blocks; tool results are folded into `user` messages,
 *   merging consecutive tool results into one user turn.
 *
 * @param systemPrompt  Optional system prompt string.
 * @param messages      Neutral ChatMessage list (roles: system/user/assistant/tool).
 * @param cacheConfig   Optional prompt-cache configuration.
 * @param skipCacheWrite Passed through to the breakpoint chooser.
 * @returns {{system: unknown, messages: Array<object>}}
 */
function convertAnthropicMessages(systemPrompt, messages, cacheConfig, skipCacheWrite) {
  const result = [];
  const caching = isCachingEnabled(cacheConfig);
  // -1 disables the per-message cache breakpoint entirely.
  const cacheBreakpointIdx = caching ? getMessageCacheBreakpointIndex(messages, skipCacheWrite) : -1;
  for (let mi = 0; mi < messages.length; mi++) {
    const msg = messages[mi];
    const addCache = mi === cacheBreakpointIdx;
    // System prompt is carried separately; never emitted as a message.
    if (msg.role === "system") continue;
    if (msg.role === "user") {
      const isMultipart = Array.isArray(msg.content);
      if (addCache && caching) {
        // Normalize to block form so cache_control can attach to the last block.
        const blocks = isMultipart ? contentPartsToAnthropic(msg.content) : [{ type: "text", text: msg.content }];
        const lastBlock = blocks[blocks.length - 1];
        lastBlock.cache_control = buildCacheControlBlock(cacheConfig);
        result.push({ role: "user", content: blocks });
      } else if (isMultipart) {
        result.push({
          role: "user",
          content: contentPartsToAnthropic(msg.content)
        });
      } else {
        // Plain string content passes through untouched.
        result.push({ role: "user", content: msg.content });
      }
    } else if (msg.role === "assistant") {
      const content = [];
      if (msg.thinking_content) {
        const thinkingBlock = {
          type: "thinking",
          thinking: msg.thinking_content
        };
        // Signature must be echoed back for Anthropic to accept the block.
        if (msg.thinking_signature) {
          thinkingBlock.signature = msg.thinking_signature;
        }
        content.push(thinkingBlock);
      }
      if (msg.redacted_thinking_data) {
        content.push({
          type: "redacted_thinking",
          data: msg.redacted_thinking_data
        });
      }
      // Skip whitespace-only string content; keep any non-string content.
      if (msg.content && (typeof msg.content !== "string" || msg.content.trim() !== "")) {
        content.push({ type: "text", text: msg.content });
      }
      if (msg.tool_calls) {
        for (const tc of msg.tool_calls) {
          let input = {};
          try {
            input = JSON.parse(tc.function.arguments);
          } catch {
            // malformed JSON from truncated stream — send empty input
          }
          content.push({
            type: "tool_use",
            id: tc.id,
            name: tc.function.name,
            input
          });
        }
      }
      // Anthropic rejects empty assistant content; pad with an empty text block.
      if (content.length === 0) {
        content.push({ type: "text", text: "" });
      }
      if (addCache && caching && content.length > 0) {
        // Attach cache_control to the last cacheable block; thinking blocks
        // cannot carry it, so scan backwards past them.
        for (let i = content.length - 1; i >= 0; i--) {
          const block = content[i];
          if (block.type !== "thinking" && block.type !== "redacted_thinking") {
            block.cache_control = buildCacheControlBlock(cacheConfig);
            break;
          }
        }
      }
      result.push({ role: "assistant", content });
    } else if (msg.role === "tool") {
      const isMultipart = Array.isArray(msg.content);
      let toolContent;
      if (msg.isError && isMultipart) {
        // For errors, keep only text parts (images in an error result are dropped).
        const textOnly = msg.content.filter(
          (p) => p.type === "text"
        );
        toolContent = textOnly.length > 0 ? contentPartsToAnthropic(textOnly) : String(msg.content);
      } else {
        toolContent = isMultipart ? contentPartsToAnthropic(msg.content) : msg.content;
      }
      const toolResultBlock = {
        type: "tool_result",
        tool_use_id: msg.tool_call_id,
        content: toolContent
      };
      if (msg.isError) {
        toolResultBlock.is_error = true;
      }
      if (addCache && caching) {
        toolResultBlock.cache_control = buildCacheControlBlock(cacheConfig);
      }
      // Merge consecutive tool results into the previous user message when it
      // already holds tool_result blocks (parallel tool calls).
      const prev = result[result.length - 1];
      if (prev && prev.role === "user" && Array.isArray(prev.content)) {
        const blocks = prev.content;
        if (blocks.length > 0 && blocks[0].type === "tool_result") {
          blocks.push(toolResultBlock);
          continue;
        }
      }
      // Tool results are delivered to Anthropic inside a user-role message.
      result.push({ role: "user", content: [toolResultBlock] });
    }
  }
  return {
    system: buildAnthropicSystemBlocks(systemPrompt, cacheConfig),
    messages: result
  };
}
|
|
174
|
+
/**
 * Wrap a delta into an OpenAI-style single-choice stream chunk.
 *
 * @param {string} id - Chunk id.
 * @param {string} model - Model name echoed into the chunk.
 * @param {object} delta - The delta payload for choice 0.
 * @returns {object} A chunk with one choice at index 0 and a null finish_reason.
 */
function makeChunk(id, model, delta) {
  const choice = { index: 0, delta, finish_reason: null };
  return { id, model, choices: [choice] };
}
|
|
187
|
+
/**
 * Build the complete Anthropic `messages.stream` request from neutral params.
 *
 * Pure parameter construction: converts messages, builds tools, resolves the
 * model, sizes max_tokens / thinking budget, and applies output-format
 * handling (structured outputs beta, or a JSON-only system hint).
 *
 * @param params       Neutral ChatParams (messages, tools, thinking, outputFormat, ...).
 * @param defaultModel Fallback model when params.model is absent.
 * @param cacheConfig  Optional prompt-cache configuration.
 * @returns {{streamParams: object, model: string}}
 */
function buildAnthropicRequestParams(params, defaultModel, cacheConfig) {
  const { system, messages: inputMessages } = convertAnthropicMessages(
    params.system,
    params.messages,
    cacheConfig,
    params.skipCacheWrite
  );
  const tools = buildAnthropicTools(params, cacheConfig);
  // Thinking counts only when explicitly enabled with a positive budget.
  const thinkingEnabled = params.thinking?.type === "enabled" && params.thinking.budgetTokens > 0;
  const budgetTokens = thinkingEnabled ? params.thinking.budgetTokens : 0;
  const model = params.model ?? defaultModel;
  const modelMaxOutput = getMaxOutputTokensForModel(model);
  // With thinking on, default max_tokens to the model ceiling (thinking needs
  // headroom); otherwise default to 8192.
  const maxOutputTokens = thinkingEnabled ? params.max_tokens ?? modelMaxOutput : params.max_tokens ?? 8192;
  // Budget must be strictly below max_tokens per the API contract.
  const clampedBudget = thinkingEnabled ? Math.min(budgetTokens, maxOutputTokens - 1) : 0;
  const streamParams = {
    model,
    max_tokens: maxOutputTokens,
    system,
    messages: inputMessages,
    tools
  };
  // Temperature is incompatible with extended thinking; only set it otherwise.
  if (!thinkingEnabled && params.temperature !== void 0) {
    streamParams.temperature = params.temperature;
  }
  if (thinkingEnabled) {
    streamParams.thinking = {
      type: "enabled",
      budget_tokens: clampedBudget
    };
  }
  if (params.outputFormat?.type === "json_schema") {
    // Structured outputs: schema-constrained responses behind a beta flag.
    streamParams.output_config = {
      format: {
        type: "json_schema",
        json_schema: {
          name: params.outputFormat.name ?? "response",
          schema: params.outputFormat.schema
        }
      }
    };
    const betas = streamParams.betas ?? [];
    if (!betas.includes("structured-outputs-2025-12-15")) {
      betas.push("structured-outputs-2025-12-15");
    }
    streamParams.betas = betas;
  } else if (params.outputFormat?.type === "json_object") {
    // No native json_object mode: steer via an appended system-prompt hint.
    const hint = "\n\nYou MUST respond with valid JSON only. No markdown, no explanation \u2014 just a single JSON object.";
    if (typeof streamParams.system === "string") {
      streamParams.system = streamParams.system + hint;
    } else if (Array.isArray(streamParams.system)) {
      // Block form (caching enabled): append to the last text block so the
      // hint lands after the cached prefix.
      const blocks = streamParams.system;
      if (blocks.length > 0) {
        const last = blocks[blocks.length - 1];
        if (last.type === "text" && typeof last.text === "string") {
          last.text = last.text + hint;
        }
      }
    } else if (!streamParams.system) {
      // No system prompt at all: the hint becomes the system prompt.
      streamParams.system = hint.trim();
    }
  }
  return { streamParams, model };
}
|
|
250
|
+
/**
 * Translate an Anthropic `stop_reason` into an OpenAI-style `finish_reason`.
 *
 * Unknown or missing stop reasons fall back to "tool_calls" when tool calls
 * were emitted during the stream, otherwise "stop".
 *
 * @param {string|undefined} stopReason - Anthropic stop_reason value.
 * @param {boolean} hasToolCalls - Whether any tool_use blocks were seen.
 * @returns {string} OpenAI-compatible finish_reason.
 */
function mapAnthropicStopReason(stopReason, hasToolCalls) {
  const table = {
    end_turn: "stop",
    tool_use: "tool_calls",
    max_tokens: "length",
    model_context_window_exceeded: "length",
    stop_sequence: "stop",
    refusal: "content_filter"
  };
  if (stopReason !== void 0 && Object.hasOwn(table, stopReason)) {
    return table[stopReason];
  }
  return hasToolCalls ? "tool_calls" : "stop";
}
|
|
268
|
+
/**
 * Create a fresh accumulator for one Anthropic streaming response.
 *
 * Tracks chunk numbering, tool-call index bookkeeping (tool id → output
 * index, content-block index → tool id / block type), token usage counters,
 * the final stop reason, and whether message_stop was observed.
 *
 * @returns {object} Zeroed stream state.
 */
function createAnthropicStreamState() {
  const usageCounters = {
    inputTokens: 0,
    outputTokens: 0,
    cacheReadTokens: 0,
    cacheCreationTokens: 0,
    thinkingTokens: 0
  };
  return {
    chunkIndex: 0,
    toolIndexMap: new Map(),
    blockIndexToToolId: new Map(),
    blockIndexToType: new Map(),
    nextToolIndex: 0,
    ...usageCounters,
    stopReason: void 0,
    receivedMessageStop: false
  };
}
|
|
284
|
+
/**
 * Reduce one Anthropic SSE event into zero or more OpenAI-style chunks,
 * mutating `state` (chunk counter, tool index maps, usage, stop reason).
 *
 * Event handling:
 * - message_start: capture initial usage; emit nothing.
 * - message_delta: capture stop_reason and updated usage; emit nothing.
 * - content_block_start: record block type; emit an empty-delta chunk for
 *   thinking/redacted_thinking/text, or a tool-call-open chunk for tool_use.
 * - content_block_delta: emit the matching incremental delta.
 * - message_stop: emit the final chunk with finish_reason and usage totals.
 *
 * @param ev    Raw event object from the Anthropic stream.
 * @param state Mutable AnthropicStreamState from createAnthropicStreamState().
 * @param model Model name echoed into every chunk.
 * @returns {Array<object>} Chunks to yield for this event (possibly empty).
 */
function processAnthropicStreamEvent(ev, state, model) {
  const chunks = [];
  // Every event consumes a chunk id, even when no chunk is emitted.
  const chunkId = `chatcmpl-${state.chunkIndex++}`;
  if (ev.type === "message_start") {
    const msg = ev.message ?? {};
    const usage = msg.usage;
    if (usage) {
      state.inputTokens = usage.input_tokens ?? 0;
      state.outputTokens = usage.output_tokens ?? 0;
      state.cacheReadTokens = usage.cache_read_input_tokens ?? 0;
      state.cacheCreationTokens = usage.cache_creation_input_tokens ?? 0;
      if (usage.thinking_tokens) state.thinkingTokens = usage.thinking_tokens;
    }
    return chunks;
  }
  if (ev.type === "message_delta") {
    const delta = ev.delta;
    if (delta?.stop_reason) {
      state.stopReason = delta.stop_reason;
    }
    const usage = ev.usage;
    // Only overwrite counters with meaningful (positive) values.
    if (usage?.output_tokens != null && usage.output_tokens > 0) {
      state.outputTokens = usage.output_tokens;
    }
    if (usage?.thinking_tokens != null && usage.thinking_tokens > 0) {
      state.thinkingTokens = usage.thinking_tokens;
    }
    return chunks;
  }
  if (ev.type === "content_block_start") {
    // Shallow-copy to guard against SDK mutating the original object
    const block = { ...ev.content_block ?? {} };
    const blockIndex = ev.index;
    if (blockIndex !== void 0) {
      // Remembered so signature_delta can be matched to a thinking block later.
      state.blockIndexToType.set(blockIndex, block.type);
    }
    if (block.type === "thinking") {
      chunks.push(makeChunk(chunkId, model, { thinking_content: "" }));
    } else if (block.type === "redacted_thinking") {
      const redactedData = block.data;
      chunks.push(makeChunk(chunkId, model, { redacted_thinking_data: redactedData ?? "" }));
    } else if (block.type === "text") {
      chunks.push(makeChunk(chunkId, model, { content: "" }));
    } else if (block.type === "tool_use") {
      const toolBlock = block;
      // A tool_use block without id/name cannot be tracked; drop it.
      if (!toolBlock.id || !toolBlock.name) return chunks;
      const idx = state.nextToolIndex++;
      state.toolIndexMap.set(toolBlock.id, idx);
      if (blockIndex !== void 0) {
        state.blockIndexToToolId.set(blockIndex, toolBlock.id);
      }
      chunks.push(makeChunk(chunkId, model, {
        tool_calls: [
          {
            index: idx,
            id: toolBlock.id,
            type: "function",
            function: { name: toolBlock.name, arguments: "" }
          }
        ]
      }));
    }
    return chunks;
  }
  if (ev.type === "content_block_delta") {
    if (!ev.delta) return chunks;
    const delta = ev.delta;
    const deltaType = delta.type;
    const blockIndex = ev.index;
    if (deltaType === "thinking_delta") {
      chunks.push(makeChunk(chunkId, model, {
        thinking_content: delta.thinking
      }));
    } else if (deltaType === "text_delta") {
      chunks.push(makeChunk(chunkId, model, {
        content: delta.text
      }));
    } else if (deltaType === "signature_delta") {
      // Forward signatures only for blocks we recorded as "thinking".
      if (blockIndex !== void 0 && state.blockIndexToType.get(blockIndex) === "thinking") {
        chunks.push(makeChunk(chunkId, model, {
          thinking_signature: delta.signature
        }));
      }
    } else if (deltaType === "input_json_delta") {
      const partialJson = delta.partial_json ?? "";
      if (!partialJson) return chunks;
      let toolId;
      if (blockIndex !== void 0) {
        toolId = state.blockIndexToToolId.get(blockIndex);
      }
      if (!toolId) {
        // Fallback: attribute the JSON to the most recently opened tool call.
        toolId = Array.from(state.toolIndexMap.keys()).pop();
      }
      if (toolId) {
        const idx = state.toolIndexMap.get(toolId);
        chunks.push(makeChunk(chunkId, model, {
          tool_calls: [
            {
              index: idx,
              function: { arguments: partialJson }
            }
          ]
        }));
      }
    }
    return chunks;
  }
  if (ev.type === "message_stop") {
    state.receivedMessageStop = true;
    const finishReason = mapAnthropicStopReason(state.stopReason, state.toolIndexMap.size > 0);
    // Final chunk carries finish_reason plus the accumulated usage; optional
    // counters are elided (void 0) when they stayed at zero.
    chunks.push({
      id: chunkId,
      model,
      choices: [
        {
          index: 0,
          delta: {},
          finish_reason: finishReason
        }
      ],
      usage: {
        prompt_tokens: state.inputTokens,
        completion_tokens: state.outputTokens,
        total_tokens: state.inputTokens + state.outputTokens,
        cache_read_tokens: state.cacheReadTokens || void 0,
        cache_creation_tokens: state.cacheCreationTokens || void 0,
        thinking_tokens: state.thinkingTokens || void 0
      }
    });
    return chunks;
  }
  // Unknown event types (e.g. pings) are ignored.
  return chunks;
}
|
|
416
|
+
/**
 * Stream a chat completion from an Anthropic-compatible client, yielding
 * OpenAI-style chunks. Shared by the Anthropic, Bedrock, and Vertex providers
 * via a generic client shape (`client.messages.stream(params, options)`).
 *
 * All failures — stream creation, mid-stream errors, missing message_stop,
 * or an entirely empty stream — are surfaced as ChatStreamError with
 * status / retry-after metadata when available and the original error as
 * `cause`.
 *
 * @param client       Object exposing `messages.stream(params, {signal}?)`.
 * @param params       Neutral ChatParams; `params.signal` enables abort.
 * @param defaultModel Fallback model when params.model is absent.
 * @param cacheConfig  Optional prompt-cache configuration.
 * @yields OpenAI-style stream chunks.
 * @throws {ChatStreamError} On any stream failure or incomplete stream.
 */
async function* streamAnthropicChat(client, params, defaultModel, cacheConfig) {
  const { streamParams, model } = buildAnthropicRequestParams(params, defaultModel, cacheConfig);
  const requestSignal = params.signal;
  let stream;
  try {
    stream = client.messages.stream(
      streamParams,
      requestSignal ? { signal: requestSignal } : void 0
    );
  } catch (err) {
    const apiErr = err;
    // headers may be a Headers object (.get) or a plain record — try both.
    throw new ChatStreamError(
      err instanceof Error ? err.message : String(err),
      {
        status: apiErr.status,
        retryAfter: apiErr.headers?.get?.("retry-after") ?? apiErr.headers?.["retry-after"] ?? void 0,
        cause: err
      }
    );
  }
  const state = createAnthropicStreamState();
  try {
    for await (const event of stream) {
      const ev = event;
      for (const chunk of processAnthropicStreamEvent(ev, state, model)) {
        yield chunk;
      }
    }
    // chunkIndex counts processed events, so >0 means the stream produced
    // events but never a terminating message_stop — treat as truncation.
    if (!state.receivedMessageStop && state.chunkIndex > 0) {
      throw new ChatStreamError(
        "Stream ended without receiving message_stop event",
        { cause: new Error("incomplete_stream") }
      );
    } else if (state.chunkIndex === 0) {
      throw new ChatStreamError(
        "Stream returned no events",
        { cause: new Error("empty_stream") }
      );
    }
  } catch (err) {
    // Re-throw our own errors untouched; wrap everything else uniformly.
    if (err instanceof ChatStreamError) throw err;
    const apiErr = err;
    throw new ChatStreamError(
      err instanceof Error ? err.message : String(err),
      {
        status: apiErr.status,
        retryAfter: apiErr.headers?.get?.("retry-after") ?? apiErr.headers?.["retry-after"] ?? void 0,
        cause: err
      }
    );
  }
}
|
|
468
|
+
|
|
469
|
+
export {
|
|
470
|
+
streamAnthropicChat
|
|
471
|
+
};
|
|
472
|
+
//# sourceMappingURL=chunk-EKOGVTBT.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/providers/anthropic-shared.ts"],"sourcesContent":["/**\n * Shared Anthropic streaming, message conversion, and tool mapping logic.\n *\n * Used by AnthropicProvider, BedrockAnthropicProvider, and VertexAnthropicProvider.\n * Accepts a generic client shape so it works with all three SDKs without\n * importing any of them directly.\n */\n\nimport type { ChatParams, ChatStreamChunk } from \"./types.js\";\nimport { ChatStreamError } from \"./types.js\";\nimport type { ChatMessage, ContentPart } from \"../session/types.js\";\nimport type { CacheControlConfig } from \"./cache.js\";\nimport { getMessageCacheBreakpointIndex } from \"./cache.js\";\nimport { getMaxOutputTokensForModel } from \"../utils/context.js\";\n\ninterface AnthropicToolUseBlock {\n type: \"tool_use\";\n id: string;\n name: string;\n input: Record<string, unknown>;\n}\n\ntype CacheControlBlock = {\n type: \"ephemeral\";\n ttl?: \"1h\";\n scope?: \"global\" | \"org\";\n};\n\nexport interface AnthropicStreamClient {\n messages: {\n stream(\n params: Record<string, unknown>,\n options?: { signal?: AbortSignal },\n ): AsyncIterable<Record<string, unknown>>;\n };\n}\n\nexport function buildCacheControlBlock(\n config: CacheControlConfig | undefined,\n): CacheControlBlock {\n const cc: CacheControlBlock = { type: \"ephemeral\" };\n if (config?.ttl) cc.ttl = config.ttl;\n if (config?.scope) cc.scope = config.scope;\n return cc;\n}\n\nfunction isCachingEnabled(config: CacheControlConfig | undefined): boolean {\n return config?.enabled === true;\n}\n\nexport function contentPartsToAnthropic(\n parts: ContentPart[],\n): Record<string, unknown>[] {\n return parts.map((part) => {\n if (part.type === \"text\") {\n return { type: \"text\", text: part.text };\n }\n if (part.type === \"image\") {\n return {\n type: \"image\",\n source: {\n type: \"base64\",\n media_type: part.media_type,\n data: part.data,\n },\n };\n }\n return {\n type: \"image\",\n source: { type: \"url\", url: 
part.url },\n };\n });\n}\n\nexport function buildAnthropicTools(\n params: ChatParams,\n cacheConfig?: CacheControlConfig,\n): Record<string, unknown>[] | undefined {\n if (!params.tools) return undefined;\n\n const tools = params.tools.map((t) => ({\n name: t.function.name,\n description: t.function.description,\n input_schema: t.function.parameters,\n }));\n\n if (isCachingEnabled(cacheConfig) && tools.length > 0) {\n const lastTool = tools[tools.length - 1] as Record<string, unknown>;\n lastTool.cache_control = buildCacheControlBlock(cacheConfig);\n }\n\n return tools;\n}\n\nexport function buildAnthropicSystemBlocks(\n systemPrompt: string | undefined,\n cacheConfig?: CacheControlConfig,\n): unknown {\n if (!systemPrompt) return undefined;\n if (!isCachingEnabled(cacheConfig)) return systemPrompt;\n\n return [\n {\n type: \"text\",\n text: systemPrompt,\n cache_control: buildCacheControlBlock(cacheConfig),\n },\n ];\n}\n\nexport function convertAnthropicMessages(\n systemPrompt: string | undefined,\n messages: ChatMessage[],\n cacheConfig?: CacheControlConfig,\n skipCacheWrite?: boolean,\n): {\n system: unknown;\n messages: Record<string, unknown>[];\n} {\n const result: Record<string, unknown>[] = [];\n const caching = isCachingEnabled(cacheConfig);\n const cacheBreakpointIdx = caching\n ? getMessageCacheBreakpointIndex(messages, skipCacheWrite)\n : -1;\n\n for (let mi = 0; mi < messages.length; mi++) {\n const msg = messages[mi];\n const addCache = mi === cacheBreakpointIdx;\n\n if (msg.role === \"system\") continue;\n\n if (msg.role === \"user\") {\n const isMultipart = Array.isArray(msg.content);\n if (addCache && caching) {\n const blocks = isMultipart\n ? 
contentPartsToAnthropic(msg.content as ContentPart[])\n : [{ type: \"text\", text: msg.content as string }];\n const lastBlock = blocks[blocks.length - 1] as Record<string, unknown>;\n lastBlock.cache_control = buildCacheControlBlock(cacheConfig);\n result.push({ role: \"user\", content: blocks });\n } else if (isMultipart) {\n result.push({\n role: \"user\",\n content: contentPartsToAnthropic(msg.content as ContentPart[]),\n });\n } else {\n result.push({ role: \"user\", content: msg.content as string });\n }\n } else if (msg.role === \"assistant\") {\n const content: Record<string, unknown>[] = [];\n if (msg.thinking_content) {\n const thinkingBlock: Record<string, unknown> = {\n type: \"thinking\",\n thinking: msg.thinking_content,\n };\n if (msg.thinking_signature) {\n thinkingBlock.signature = msg.thinking_signature;\n }\n content.push(thinkingBlock);\n }\n if (msg.redacted_thinking_data) {\n content.push({\n type: \"redacted_thinking\",\n data: msg.redacted_thinking_data,\n });\n }\n if (msg.content && (typeof msg.content !== \"string\" || msg.content.trim() !== \"\")) {\n content.push({ type: \"text\", text: msg.content });\n }\n if (msg.tool_calls) {\n for (const tc of msg.tool_calls) {\n let input: Record<string, unknown> = {};\n try {\n input = JSON.parse(tc.function.arguments);\n } catch {\n // malformed JSON from truncated stream — send empty input\n }\n content.push({\n type: \"tool_use\",\n id: tc.id,\n name: tc.function.name,\n input,\n });\n }\n }\n if (content.length === 0) {\n content.push({ type: \"text\", text: \"\" });\n }\n if (addCache && caching && content.length > 0) {\n for (let i = content.length - 1; i >= 0; i--) {\n const block = content[i] as Record<string, unknown>;\n if (block.type !== \"thinking\" && block.type !== \"redacted_thinking\") {\n block.cache_control = buildCacheControlBlock(cacheConfig);\n break;\n }\n }\n }\n result.push({ role: \"assistant\", content });\n } else if (msg.role === \"tool\") {\n const isMultipart = 
Array.isArray(msg.content);\n let toolContent: string | Record<string, unknown>[];\n\n if (msg.isError && isMultipart) {\n const textOnly = (msg.content as ContentPart[]).filter(\n (p) => p.type === \"text\",\n );\n toolContent =\n textOnly.length > 0\n ? contentPartsToAnthropic(textOnly)\n : String(msg.content);\n } else {\n toolContent = isMultipart\n ? contentPartsToAnthropic(msg.content as ContentPart[])\n : (msg.content as string);\n }\n\n const toolResultBlock: Record<string, unknown> = {\n type: \"tool_result\",\n tool_use_id: msg.tool_call_id,\n content: toolContent,\n };\n if (msg.isError) {\n toolResultBlock.is_error = true;\n }\n if (addCache && caching) {\n toolResultBlock.cache_control = buildCacheControlBlock(cacheConfig);\n }\n\n const prev = result[result.length - 1];\n if (prev && prev.role === \"user\" && Array.isArray(prev.content)) {\n const blocks = prev.content as Record<string, unknown>[];\n if (blocks.length > 0 && blocks[0].type === \"tool_result\") {\n blocks.push(toolResultBlock);\n continue;\n }\n }\n result.push({ role: \"user\", content: [toolResultBlock] });\n }\n }\n\n return {\n system: buildAnthropicSystemBlocks(systemPrompt, cacheConfig),\n messages: result,\n };\n}\n\nexport function makeChunk(\n id: string,\n model: string,\n delta: Record<string, unknown>,\n): ChatStreamChunk {\n return {\n id,\n model,\n choices: [\n {\n index: 0,\n delta: delta as ChatStreamChunk[\"choices\"][0][\"delta\"],\n finish_reason: null,\n },\n ],\n };\n}\n\n// ---------------------------------------------------------------------------\n// buildAnthropicRequestParams — pure param construction\n// ---------------------------------------------------------------------------\n\nexport interface AnthropicRequestParamsResult {\n streamParams: Record<string, unknown>;\n model: string;\n}\n\nexport function buildAnthropicRequestParams(\n params: ChatParams,\n defaultModel: string,\n cacheConfig?: CacheControlConfig,\n): AnthropicRequestParamsResult {\n const 
{ system, messages: inputMessages } = convertAnthropicMessages(\n params.system,\n params.messages,\n cacheConfig,\n params.skipCacheWrite,\n );\n\n const tools = buildAnthropicTools(params, cacheConfig);\n\n const thinkingEnabled =\n params.thinking?.type === \"enabled\" &&\n (params.thinking as { budgetTokens: number }).budgetTokens > 0;\n const budgetTokens = thinkingEnabled\n ? (params.thinking as { type: \"enabled\"; budgetTokens: number }).budgetTokens\n : 0;\n\n const model = params.model ?? defaultModel;\n\n const modelMaxOutput = getMaxOutputTokensForModel(model);\n const maxOutputTokens = thinkingEnabled\n ? (params.max_tokens ?? modelMaxOutput)\n : (params.max_tokens ?? 8192);\n const clampedBudget = thinkingEnabled\n ? Math.min(budgetTokens, maxOutputTokens - 1)\n : 0;\n\n const streamParams: Record<string, unknown> = {\n model,\n max_tokens: maxOutputTokens,\n system,\n messages: inputMessages,\n tools,\n };\n\n if (!thinkingEnabled && params.temperature !== undefined) {\n streamParams.temperature = params.temperature;\n }\n\n if (thinkingEnabled) {\n streamParams.thinking = {\n type: \"enabled\",\n budget_tokens: clampedBudget,\n };\n }\n\n if (params.outputFormat?.type === \"json_schema\") {\n streamParams.output_config = {\n format: {\n type: \"json_schema\",\n json_schema: {\n name: params.outputFormat.name ?? \"response\",\n schema: params.outputFormat.schema,\n },\n },\n };\n const betas: string[] = (streamParams.betas as string[] | undefined) ?? [];\n if (!betas.includes(\"structured-outputs-2025-12-15\")) {\n betas.push(\"structured-outputs-2025-12-15\");\n }\n streamParams.betas = betas;\n } else if (params.outputFormat?.type === \"json_object\") {\n const hint = \"\\n\\nYou MUST respond with valid JSON only. 
No markdown, no explanation — just a single JSON object.\";\n if (typeof streamParams.system === \"string\") {\n streamParams.system = streamParams.system + hint;\n } else if (Array.isArray(streamParams.system)) {\n const blocks = streamParams.system as Array<Record<string, unknown>>;\n if (blocks.length > 0) {\n const last = blocks[blocks.length - 1];\n if (last.type === \"text\" && typeof last.text === \"string\") {\n last.text = last.text + hint;\n }\n }\n } else if (!streamParams.system) {\n streamParams.system = hint.trim();\n }\n }\n\n return { streamParams, model };\n}\n\n// ---------------------------------------------------------------------------\n// mapAnthropicStopReason — pure stop_reason -> finish_reason mapping\n// ---------------------------------------------------------------------------\n\nexport function mapAnthropicStopReason(\n stopReason: string | undefined,\n hasToolCalls: boolean,\n): string {\n switch (stopReason) {\n case \"end_turn\": return \"stop\";\n case \"tool_use\": return \"tool_calls\";\n case \"max_tokens\": return \"length\";\n case \"model_context_window_exceeded\": return \"length\";\n case \"stop_sequence\": return \"stop\";\n case \"refusal\": return \"content_filter\";\n default: return hasToolCalls ? 
\"tool_calls\" : \"stop\";\n }\n}\n\n// ---------------------------------------------------------------------------\n// AnthropicStreamState + processAnthropicStreamEvent — reducer pattern\n// ---------------------------------------------------------------------------\n\nexport interface AnthropicStreamState {\n chunkIndex: number;\n toolIndexMap: Map<string, number>;\n blockIndexToToolId: Map<number, string>;\n blockIndexToType: Map<number, string>;\n nextToolIndex: number;\n inputTokens: number;\n outputTokens: number;\n cacheReadTokens: number;\n cacheCreationTokens: number;\n thinkingTokens: number;\n stopReason: string | undefined;\n receivedMessageStop: boolean;\n}\n\nexport function createAnthropicStreamState(): AnthropicStreamState {\n return {\n chunkIndex: 0,\n toolIndexMap: new Map(),\n blockIndexToToolId: new Map(),\n blockIndexToType: new Map(),\n nextToolIndex: 0,\n inputTokens: 0,\n outputTokens: 0,\n cacheReadTokens: 0,\n cacheCreationTokens: 0,\n thinkingTokens: 0,\n stopReason: undefined,\n receivedMessageStop: false,\n };\n}\n\nexport function processAnthropicStreamEvent(\n ev: Record<string, unknown>,\n state: AnthropicStreamState,\n model: string,\n): ChatStreamChunk[] {\n const chunks: ChatStreamChunk[] = [];\n const chunkId = `chatcmpl-${state.chunkIndex++}`;\n\n if (ev.type === \"message_start\") {\n const msg = (ev.message as Record<string, unknown>) ?? {};\n const usage = msg.usage as Record<string, unknown> | undefined;\n if (usage) {\n state.inputTokens = (usage.input_tokens as number) ?? 0;\n state.outputTokens = (usage.output_tokens as number) ?? 0;\n state.cacheReadTokens = (usage.cache_read_input_tokens as number) ?? 0;\n state.cacheCreationTokens = (usage.cache_creation_input_tokens as number) ?? 
0;\n if (usage.thinking_tokens) state.thinkingTokens = usage.thinking_tokens as number;\n }\n return chunks;\n }\n\n if (ev.type === \"message_delta\") {\n const delta = ev.delta as Record<string, unknown> | undefined;\n if (delta?.stop_reason) {\n state.stopReason = delta.stop_reason as string;\n }\n const usage = ev.usage as Record<string, unknown> | undefined;\n if (usage?.output_tokens != null && (usage.output_tokens as number) > 0) {\n state.outputTokens = usage.output_tokens as number;\n }\n if (usage?.thinking_tokens != null && (usage.thinking_tokens as number) > 0) {\n state.thinkingTokens = usage.thinking_tokens as number;\n }\n return chunks;\n }\n\n if (ev.type === \"content_block_start\") {\n // Shallow-copy to guard against SDK mutating the original object\n const block = { ...((ev.content_block as Record<string, unknown>) ?? {}) };\n const blockIndex = ev.index as number | undefined;\n if (blockIndex !== undefined) {\n state.blockIndexToType.set(blockIndex, block.type as string);\n }\n\n if (block.type === \"thinking\") {\n chunks.push(makeChunk(chunkId, model, { thinking_content: \"\" }));\n } else if (block.type === \"redacted_thinking\") {\n const redactedData = block.data as string | undefined;\n chunks.push(makeChunk(chunkId, model, { redacted_thinking_data: redactedData ?? 
\"\" }));\n } else if (block.type === \"text\") {\n chunks.push(makeChunk(chunkId, model, { content: \"\" }));\n } else if (block.type === \"tool_use\") {\n const toolBlock = block as unknown as AnthropicToolUseBlock;\n if (!toolBlock.id || !toolBlock.name) return chunks;\n const idx = state.nextToolIndex++;\n state.toolIndexMap.set(toolBlock.id, idx);\n if (blockIndex !== undefined) {\n state.blockIndexToToolId.set(blockIndex, toolBlock.id);\n }\n chunks.push(makeChunk(chunkId, model, {\n tool_calls: [\n {\n index: idx,\n id: toolBlock.id,\n type: \"function\",\n function: { name: toolBlock.name, arguments: \"\" },\n },\n ],\n }));\n }\n return chunks;\n }\n\n if (ev.type === \"content_block_delta\") {\n if (!ev.delta) return chunks;\n const delta = ev.delta as Record<string, unknown>;\n const deltaType = delta.type;\n const blockIndex = ev.index as number | undefined;\n\n if (deltaType === \"thinking_delta\") {\n chunks.push(makeChunk(chunkId, model, {\n thinking_content: delta.thinking as string,\n }));\n } else if (deltaType === \"text_delta\") {\n chunks.push(makeChunk(chunkId, model, {\n content: delta.text as string,\n }));\n } else if (deltaType === \"signature_delta\") {\n if (blockIndex !== undefined && state.blockIndexToType.get(blockIndex) === \"thinking\") {\n chunks.push(makeChunk(chunkId, model, {\n thinking_signature: delta.signature as string,\n }));\n }\n } else if (deltaType === \"input_json_delta\") {\n const partialJson = (delta.partial_json as string) ?? 
\"\";\n if (!partialJson) return chunks;\n let toolId: string | undefined;\n if (blockIndex !== undefined) {\n toolId = state.blockIndexToToolId.get(blockIndex);\n }\n if (!toolId) {\n toolId = Array.from(state.toolIndexMap.keys()).pop();\n }\n if (toolId) {\n const idx = state.toolIndexMap.get(toolId)!;\n chunks.push(makeChunk(chunkId, model, {\n tool_calls: [\n {\n index: idx,\n function: { arguments: partialJson },\n },\n ],\n }));\n }\n }\n return chunks;\n }\n\n if (ev.type === \"message_stop\") {\n state.receivedMessageStop = true;\n const finishReason = mapAnthropicStopReason(state.stopReason, state.toolIndexMap.size > 0);\n chunks.push({\n id: chunkId,\n model,\n choices: [\n {\n index: 0,\n delta: {},\n finish_reason: finishReason,\n },\n ],\n usage: {\n prompt_tokens: state.inputTokens,\n completion_tokens: state.outputTokens,\n total_tokens: state.inputTokens + state.outputTokens,\n cache_read_tokens: state.cacheReadTokens || undefined,\n cache_creation_tokens: state.cacheCreationTokens || undefined,\n thinking_tokens: state.thinkingTokens || undefined,\n },\n });\n return chunks;\n }\n\n return chunks;\n}\n\n/**\n * Stream an Anthropic-compatible chat call and yield OpenAI-shaped ChatStreamChunks.\n * Works with Anthropic, AnthropicBedrock, and AnthropicVertex clients.\n */\nexport async function* streamAnthropicChat(\n client: AnthropicStreamClient,\n params: ChatParams,\n defaultModel: string,\n cacheConfig?: CacheControlConfig,\n): AsyncIterable<ChatStreamChunk> {\n const { streamParams, model } = buildAnthropicRequestParams(params, defaultModel, cacheConfig);\n const requestSignal = params.signal;\n\n let stream: AsyncIterable<Record<string, unknown>>;\n try {\n stream = client.messages.stream(\n streamParams,\n requestSignal ? 
{ signal: requestSignal } : undefined,\n );\n } catch (err: unknown) {\n const apiErr = err as { status?: number; headers?: Record<string, string> & { get?(k: string): string | null } };\n throw new ChatStreamError(\n err instanceof Error ? err.message : String(err),\n {\n status: apiErr.status,\n retryAfter: apiErr.headers?.get?.(\"retry-after\") ?? apiErr.headers?.[\"retry-after\"] ?? undefined,\n cause: err,\n },\n );\n }\n\n const state = createAnthropicStreamState();\n\n try {\n for await (const event of stream) {\n const ev = event as Record<string, unknown>;\n for (const chunk of processAnthropicStreamEvent(ev, state, model)) {\n yield chunk;\n }\n }\n\n if (!state.receivedMessageStop && state.chunkIndex > 0) {\n throw new ChatStreamError(\n \"Stream ended without receiving message_stop event\",\n { cause: new Error(\"incomplete_stream\") },\n );\n } else if (state.chunkIndex === 0) {\n throw new ChatStreamError(\n \"Stream returned no events\",\n { cause: new Error(\"empty_stream\") },\n );\n }\n } catch (err: unknown) {\n if (err instanceof ChatStreamError) throw err;\n const apiErr = err as { status?: number; headers?: Record<string, string> & { get?(k: string): string | null } };\n throw new ChatStreamError(\n err instanceof Error ? err.message : String(err),\n {\n status: apiErr.status,\n retryAfter: apiErr.headers?.get?.(\"retry-after\") ?? apiErr.headers?.[\"retry-after\"] ?? 
undefined,\n cause: err,\n },\n );\n }\n}\n"],"mappings":";;;;;;;;;AAqCO,SAAS,uBACd,QACmB;AACnB,QAAM,KAAwB,EAAE,MAAM,YAAY;AAClD,MAAI,QAAQ,IAAK,IAAG,MAAM,OAAO;AACjC,MAAI,QAAQ,MAAO,IAAG,QAAQ,OAAO;AACrC,SAAO;AACT;AAEA,SAAS,iBAAiB,QAAiD;AACzE,SAAO,QAAQ,YAAY;AAC7B;AAEO,SAAS,wBACd,OAC2B;AAC3B,SAAO,MAAM,IAAI,CAAC,SAAS;AACzB,QAAI,KAAK,SAAS,QAAQ;AACxB,aAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,IACzC;AACA,QAAI,KAAK,SAAS,SAAS;AACzB,aAAO;AAAA,QACL,MAAM;AAAA,QACN,QAAQ;AAAA,UACN,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,MAAM,KAAK;AAAA,QACb;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ,EAAE,MAAM,OAAO,KAAK,KAAK,IAAI;AAAA,IACvC;AAAA,EACF,CAAC;AACH;AAEO,SAAS,oBACd,QACA,aACuC;AACvC,MAAI,CAAC,OAAO,MAAO,QAAO;AAE1B,QAAM,QAAQ,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,IACrC,MAAM,EAAE,SAAS;AAAA,IACjB,aAAa,EAAE,SAAS;AAAA,IACxB,cAAc,EAAE,SAAS;AAAA,EAC3B,EAAE;AAEF,MAAI,iBAAiB,WAAW,KAAK,MAAM,SAAS,GAAG;AACrD,UAAM,WAAW,MAAM,MAAM,SAAS,CAAC;AACvC,aAAS,gBAAgB,uBAAuB,WAAW;AAAA,EAC7D;AAEA,SAAO;AACT;AAEO,SAAS,2BACd,cACA,aACS;AACT,MAAI,CAAC,aAAc,QAAO;AAC1B,MAAI,CAAC,iBAAiB,WAAW,EAAG,QAAO;AAE3C,SAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,MAAM;AAAA,MACN,eAAe,uBAAuB,WAAW;AAAA,IACnD;AAAA,EACF;AACF;AAEO,SAAS,yBACd,cACA,UACA,aACA,gBAIA;AACA,QAAM,SAAoC,CAAC;AAC3C,QAAM,UAAU,iBAAiB,WAAW;AAC5C,QAAM,qBAAqB,UACvB,+BAA+B,UAAU,cAAc,IACvD;AAEJ,WAAS,KAAK,GAAG,KAAK,SAAS,QAAQ,MAAM;AAC3C,UAAM,MAAM,SAAS,EAAE;AACvB,UAAM,WAAW,OAAO;AAExB,QAAI,IAAI,SAAS,SAAU;AAE3B,QAAI,IAAI,SAAS,QAAQ;AACvB,YAAM,cAAc,MAAM,QAAQ,IAAI,OAAO;AAC7C,UAAI,YAAY,SAAS;AACvB,cAAM,SAAS,cACX,wBAAwB,IAAI,OAAwB,IACpD,CAAC,EAAE,MAAM,QAAQ,MAAM,IAAI,QAAkB,CAAC;AAClD,cAAM,YAAY,OAAO,OAAO,SAAS,CAAC;AAC1C,kBAAU,gBAAgB,uBAAuB,WAAW;AAC5D,eAAO,KAAK,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC/C,WAAW,aAAa;AACtB,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,SAAS,wBAAwB,IAAI,OAAwB;AAAA,QAC/D,CAAC;AAAA,MACH,OAAO;AACL,eAAO,KAAK,EAAE,MAAM,QAAQ,SAAS,IAAI,QAAkB,CAAC;AAAA,MAC9D;AAAA,IACF,WAAW,IAAI,SAAS,aAAa;AACnC,YAAM,UAAqC,CAAC;AAC5C,UAAI,IAAI,kBAAkB;AACxB,cAAM,gBAAyC;AAAA,UAC7C,MAAM;AAAA,UACN,UAAU,IAAI;A
AAA,QAChB;AACA,YAAI,IAAI,oBAAoB;AAC1B,wBAAc,YAAY,IAAI;AAAA,QAChC;AACA,gBAAQ,KAAK,aAAa;AAAA,MAC5B;AACA,UAAI,IAAI,wBAAwB;AAC9B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,IAAI;AAAA,QACZ,CAAC;AAAA,MACH;AACA,UAAI,IAAI,YAAY,OAAO,IAAI,YAAY,YAAY,IAAI,QAAQ,KAAK,MAAM,KAAK;AACjF,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,IAAI,QAAQ,CAAC;AAAA,MAClD;AACA,UAAI,IAAI,YAAY;AAClB,mBAAW,MAAM,IAAI,YAAY;AAC/B,cAAI,QAAiC,CAAC;AACtC,cAAI;AACF,oBAAQ,KAAK,MAAM,GAAG,SAAS,SAAS;AAAA,UAC1C,QAAQ;AAAA,UAER;AACA,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,IAAI,GAAG;AAAA,YACP,MAAM,GAAG,SAAS;AAAA,YAClB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,UAAI,QAAQ,WAAW,GAAG;AACxB,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,GAAG,CAAC;AAAA,MACzC;AACA,UAAI,YAAY,WAAW,QAAQ,SAAS,GAAG;AAC7C,iBAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK;AAC5C,gBAAM,QAAQ,QAAQ,CAAC;AACvB,cAAI,MAAM,SAAS,cAAc,MAAM,SAAS,qBAAqB;AACnE,kBAAM,gBAAgB,uBAAuB,WAAW;AACxD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAAA,IAC5C,WAAW,IAAI,SAAS,QAAQ;AAC9B,YAAM,cAAc,MAAM,QAAQ,IAAI,OAAO;AAC7C,UAAI;AAEJ,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,WAAY,IAAI,QAA0B;AAAA,UAC9C,CAAC,MAAM,EAAE,SAAS;AAAA,QACpB;AACA,sBACE,SAAS,SAAS,IACd,wBAAwB,QAAQ,IAChC,OAAO,IAAI,OAAO;AAAA,MAC1B,OAAO;AACL,sBAAc,cACV,wBAAwB,IAAI,OAAwB,IACnD,IAAI;AAAA,MACX;AAEA,YAAM,kBAA2C;AAAA,QAC/C,MAAM;AAAA,QACN,aAAa,IAAI;AAAA,QACjB,SAAS;AAAA,MACX;AACA,UAAI,IAAI,SAAS;AACf,wBAAgB,WAAW;AAAA,MAC7B;AACA,UAAI,YAAY,SAAS;AACvB,wBAAgB,gBAAgB,uBAAuB,WAAW;AAAA,MACpE;AAEA,YAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,UAAI,QAAQ,KAAK,SAAS,UAAU,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/D,cAAM,SAAS,KAAK;AACpB,YAAI,OAAO,SAAS,KAAK,OAAO,CAAC,EAAE,SAAS,eAAe;AACzD,iBAAO,KAAK,eAAe;AAC3B;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,EAAE,MAAM,QAAQ,SAAS,CAAC,eAAe,EAAE,CAAC;AAAA,IAC1D;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,2BAA2B,cAAc,WAAW;AAAA,IAC5D,UAAU;AAAA,EACZ;AACF;AAEO,SAAS,UACd,IACA,OACA,OACiB;AACjB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP;AAAA,QACE,OAAO;AAAA,QACP;AAAA,QACA,eAAe;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AACF;AAWO,SAAS,4BACd,QACA,cACA,aAC8B;AAC9B,QAAM,EAAE,QAAQ,UA
AU,cAAc,IAAI;AAAA,IAC1C,OAAO;AAAA,IACP,OAAO;AAAA,IACP;AAAA,IACA,OAAO;AAAA,EACT;AAEA,QAAM,QAAQ,oBAAoB,QAAQ,WAAW;AAErD,QAAM,kBACJ,OAAO,UAAU,SAAS,aACzB,OAAO,SAAsC,eAAe;AAC/D,QAAM,eAAe,kBAChB,OAAO,SAAuD,eAC/D;AAEJ,QAAM,QAAQ,OAAO,SAAS;AAE9B,QAAM,iBAAiB,2BAA2B,KAAK;AACvD,QAAM,kBAAkB,kBACnB,OAAO,cAAc,iBACrB,OAAO,cAAc;AAC1B,QAAM,gBAAgB,kBAClB,KAAK,IAAI,cAAc,kBAAkB,CAAC,IAC1C;AAEJ,QAAM,eAAwC;AAAA,IAC5C;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA,UAAU;AAAA,IACV;AAAA,EACF;AAEA,MAAI,CAAC,mBAAmB,OAAO,gBAAgB,QAAW;AACxD,iBAAa,cAAc,OAAO;AAAA,EACpC;AAEA,MAAI,iBAAiB;AACnB,iBAAa,WAAW;AAAA,MACtB,MAAM;AAAA,MACN,eAAe;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,OAAO,cAAc,SAAS,eAAe;AAC/C,iBAAa,gBAAgB;AAAA,MAC3B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,aAAa;AAAA,UACX,MAAM,OAAO,aAAa,QAAQ;AAAA,UAClC,QAAQ,OAAO,aAAa;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AACA,UAAM,QAAmB,aAAa,SAAkC,CAAC;AACzE,QAAI,CAAC,MAAM,SAAS,+BAA+B,GAAG;AACpD,YAAM,KAAK,+BAA+B;AAAA,IAC5C;AACA,iBAAa,QAAQ;AAAA,EACvB,WAAW,OAAO,cAAc,SAAS,eAAe;AACtD,UAAM,OAAO;AACb,QAAI,OAAO,aAAa,WAAW,UAAU;AAC3C,mBAAa,SAAS,aAAa,SAAS;AAAA,IAC9C,WAAW,MAAM,QAAQ,aAAa,MAAM,GAAG;AAC7C,YAAM,SAAS,aAAa;AAC5B,UAAI,OAAO,SAAS,GAAG;AACrB,cAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AACrC,YAAI,KAAK,SAAS,UAAU,OAAO,KAAK,SAAS,UAAU;AACzD,eAAK,OAAO,KAAK,OAAO;AAAA,QAC1B;AAAA,MACF;AAAA,IACF,WAAW,CAAC,aAAa,QAAQ;AAC/B,mBAAa,SAAS,KAAK,KAAK;AAAA,IAClC;AAAA,EACF;AAEA,SAAO,EAAE,cAAc,MAAM;AAC/B;AAMO,SAAS,uBACd,YACA,cACQ;AACR,UAAQ,YAAY;AAAA,IAClB,KAAK;AAAY,aAAO;AAAA,IACxB,KAAK;AAAY,aAAO;AAAA,IACxB,KAAK;AAAc,aAAO;AAAA,IAC1B,KAAK;AAAiC,aAAO;AAAA,IAC7C,KAAK;AAAiB,aAAO;AAAA,IAC7B,KAAK;AAAW,aAAO;AAAA,IACvB;AAAS,aAAO,eAAe,eAAe;AAAA,EAChD;AACF;AAqBO,SAAS,6BAAmD;AACjE,SAAO;AAAA,IACL,YAAY;AAAA,IACZ,cAAc,oBAAI,IAAI;AAAA,IACtB,oBAAoB,oBAAI,IAAI;AAAA,IAC5B,kBAAkB,oBAAI,IAAI;AAAA,IAC1B,eAAe;AAAA,IACf,aAAa;AAAA,IACb,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,qBAAqB;AAAA,IACrB,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,qBAAqB;AAAA,EACvB;AACF;AAEO,SAAS,4BACd,IACA,OACA,OACmB;AACnB,QAAM,SAA4B,CAAC;AACnC,QAAM,UAAU,YAAY,MAAM,YAAY;AAE9C,MAAI,GAAG,SAAS,iBAAiB;AAC/B,UAAM,MA
AO,GAAG,WAAuC,CAAC;AACxD,UAAM,QAAQ,IAAI;AAClB,QAAI,OAAO;AACT,YAAM,cAAe,MAAM,gBAA2B;AACtD,YAAM,eAAgB,MAAM,iBAA4B;AACxD,YAAM,kBAAmB,MAAM,2BAAsC;AACrE,YAAM,sBAAuB,MAAM,+BAA0C;AAC7E,UAAI,MAAM,gBAAiB,OAAM,iBAAiB,MAAM;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAEA,MAAI,GAAG,SAAS,iBAAiB;AAC/B,UAAM,QAAQ,GAAG;AACjB,QAAI,OAAO,aAAa;AACtB,YAAM,aAAa,MAAM;AAAA,IAC3B;AACA,UAAM,QAAQ,GAAG;AACjB,QAAI,OAAO,iBAAiB,QAAS,MAAM,gBAA2B,GAAG;AACvE,YAAM,eAAe,MAAM;AAAA,IAC7B;AACA,QAAI,OAAO,mBAAmB,QAAS,MAAM,kBAA6B,GAAG;AAC3E,YAAM,iBAAiB,MAAM;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AAEA,MAAI,GAAG,SAAS,uBAAuB;AAErC,UAAM,QAAQ,EAAE,GAAK,GAAG,iBAA6C,CAAC,EAAG;AACzE,UAAM,aAAa,GAAG;AACtB,QAAI,eAAe,QAAW;AAC5B,YAAM,iBAAiB,IAAI,YAAY,MAAM,IAAc;AAAA,IAC7D;AAEA,QAAI,MAAM,SAAS,YAAY;AAC7B,aAAO,KAAK,UAAU,SAAS,OAAO,EAAE,kBAAkB,GAAG,CAAC,CAAC;AAAA,IACjE,WAAW,MAAM,SAAS,qBAAqB;AAC7C,YAAM,eAAe,MAAM;AAC3B,aAAO,KAAK,UAAU,SAAS,OAAO,EAAE,wBAAwB,gBAAgB,GAAG,CAAC,CAAC;AAAA,IACvF,WAAW,MAAM,SAAS,QAAQ;AAChC,aAAO,KAAK,UAAU,SAAS,OAAO,EAAE,SAAS,GAAG,CAAC,CAAC;AAAA,IACxD,WAAW,MAAM,SAAS,YAAY;AACpC,YAAM,YAAY;AAClB,UAAI,CAAC,UAAU,MAAM,CAAC,UAAU,KAAM,QAAO;AAC7C,YAAM,MAAM,MAAM;AAClB,YAAM,aAAa,IAAI,UAAU,IAAI,GAAG;AACxC,UAAI,eAAe,QAAW;AAC5B,cAAM,mBAAmB,IAAI,YAAY,UAAU,EAAE;AAAA,MACvD;AACA,aAAO,KAAK,UAAU,SAAS,OAAO;AAAA,QACpC,YAAY;AAAA,UACV;AAAA,YACE,OAAO;AAAA,YACP,IAAI,UAAU;AAAA,YACd,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,UAAU,MAAM,WAAW,GAAG;AAAA,UAClD;AAAA,QACF;AAAA,MACF,CAAC,CAAC;AAAA,IACJ;AACA,WAAO;AAAA,EACT;AAEA,MAAI,GAAG,SAAS,uBAAuB;AACrC,QAAI,CAAC,GAAG,MAAO,QAAO;AACtB,UAAM,QAAQ,GAAG;AACjB,UAAM,YAAY,MAAM;AACxB,UAAM,aAAa,GAAG;AAEtB,QAAI,cAAc,kBAAkB;AAClC,aAAO,KAAK,UAAU,SAAS,OAAO;AAAA,QACpC,kBAAkB,MAAM;AAAA,MAC1B,CAAC,CAAC;AAAA,IACJ,WAAW,cAAc,cAAc;AACrC,aAAO,KAAK,UAAU,SAAS,OAAO;AAAA,QACpC,SAAS,MAAM;AAAA,MACjB,CAAC,CAAC;AAAA,IACJ,WAAW,cAAc,mBAAmB;AAC1C,UAAI,eAAe,UAAa,MAAM,iBAAiB,IAAI,UAAU,MAAM,YAAY;AACrF,eAAO,KAAK,UAAU,SAAS,OAAO;AAAA,UACpC,oBAAoB,MAAM;AAAA,QAC5B,CAAC,CAAC;AAAA,MACJ;AAAA,IACF,WAAW,cAAc,oBAAoB;AAC3C,YAAM,cAAe,MAAM,gBAA2B;AACtD,UAAI,CAAC,YAAa,QAAO;A
ACzB,UAAI;AACJ,UAAI,eAAe,QAAW;AAC5B,iBAAS,MAAM,mBAAmB,IAAI,UAAU;AAAA,MAClD;AACA,UAAI,CAAC,QAAQ;AACX,iBAAS,MAAM,KAAK,MAAM,aAAa,KAAK,CAAC,EAAE,IAAI;AAAA,MACrD;AACA,UAAI,QAAQ;AACV,cAAM,MAAM,MAAM,aAAa,IAAI,MAAM;AACzC,eAAO,KAAK,UAAU,SAAS,OAAO;AAAA,UACpC,YAAY;AAAA,YACV;AAAA,cACE,OAAO;AAAA,cACP,UAAU,EAAE,WAAW,YAAY;AAAA,YACrC;AAAA,UACF;AAAA,QACF,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,MAAI,GAAG,SAAS,gBAAgB;AAC9B,UAAM,sBAAsB;AAC5B,UAAM,eAAe,uBAAuB,MAAM,YAAY,MAAM,aAAa,OAAO,CAAC;AACzF,WAAO,KAAK;AAAA,MACV,IAAI;AAAA,MACJ;AAAA,MACA,SAAS;AAAA,QACP;AAAA,UACE,OAAO;AAAA,UACP,OAAO,CAAC;AAAA,UACR,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,MACA,OAAO;AAAA,QACL,eAAe,MAAM;AAAA,QACrB,mBAAmB,MAAM;AAAA,QACzB,cAAc,MAAM,cAAc,MAAM;AAAA,QACxC,mBAAmB,MAAM,mBAAmB;AAAA,QAC5C,uBAAuB,MAAM,uBAAuB;AAAA,QACpD,iBAAiB,MAAM,kBAAkB;AAAA,MAC3C;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAMA,gBAAuB,oBACrB,QACA,QACA,cACA,aACgC;AAChC,QAAM,EAAE,cAAc,MAAM,IAAI,4BAA4B,QAAQ,cAAc,WAAW;AAC7F,QAAM,gBAAgB,OAAO;AAE7B,MAAI;AACJ,MAAI;AACF,aAAS,OAAO,SAAS;AAAA,MACvB;AAAA,MACA,gBAAgB,EAAE,QAAQ,cAAc,IAAI;AAAA,IAC9C;AAAA,EACF,SAAS,KAAc;AACrB,UAAM,SAAS;AACf,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MAC/C;AAAA,QACE,QAAQ,OAAO;AAAA,QACf,YAAY,OAAO,SAAS,MAAM,aAAa,KAAK,OAAO,UAAU,aAAa,KAAK;AAAA,QACvF,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAQ,2BAA2B;AAEzC,MAAI;AACF,qBAAiB,SAAS,QAAQ;AAChC,YAAM,KAAK;AACX,iBAAW,SAAS,4BAA4B,IAAI,OAAO,KAAK,GAAG;AACjE,cAAM;AAAA,MACR;AAAA,IACF;AAEA,QAAI,CAAC,MAAM,uBAAuB,MAAM,aAAa,GAAG;AACtD,YAAM,IAAI;AAAA,QACR;AAAA,QACA,EAAE,OAAO,IAAI,MAAM,mBAAmB,EAAE;AAAA,MAC1C;AAAA,IACF,WAAW,MAAM,eAAe,GAAG;AACjC,YAAM,IAAI;AAAA,QACR;AAAA,QACA,EAAE,OAAO,IAAI,MAAM,cAAc,EAAE;AAAA,MACrC;AAAA,IACF;AAAA,EACF,SAAS,KAAc;AACrB,QAAI,eAAe,gBAAiB,OAAM;AAC1C,UAAM,SAAS;AACf,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MAC/C;AAAA,QACE,QAAQ,OAAO;AAAA,QACf,YAAY,OAAO,SAAS,MAAM,aAAa,KAAK,OAAO,UAAU,aAAa,KAAK;AAAA,QACvF,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
|
|
@@ -1,34 +1,21 @@
|
|
|
1
|
-
// src/providers/cache.ts
|
|
2
|
-
function sortToolDefinitionsForCache(tools, mcpToolNames) {
|
|
3
|
-
const byName = (a, b) => a.function.name.localeCompare(b.function.name);
|
|
4
|
-
if (!mcpToolNames || mcpToolNames.size === 0) {
|
|
5
|
-
return [...tools].sort(byName);
|
|
6
|
-
}
|
|
7
|
-
const builtIn = [];
|
|
8
|
-
const mcp = [];
|
|
9
|
-
for (const t of tools) {
|
|
10
|
-
if (mcpToolNames.has(t.function.name)) {
|
|
11
|
-
mcp.push(t);
|
|
12
|
-
} else {
|
|
13
|
-
builtIn.push(t);
|
|
14
|
-
}
|
|
15
|
-
}
|
|
16
|
-
return [...builtIn.sort(byName), ...mcp.sort(byName)];
|
|
17
|
-
}
|
|
18
|
-
function getMessageCacheBreakpointIndex(messages, skipCacheWrite) {
|
|
19
|
-
if (messages.length === 0) return -1;
|
|
20
|
-
return skipCacheWrite && messages.length >= 2 ? messages.length - 2 : messages.length - 1;
|
|
21
|
-
}
|
|
22
|
-
|
|
23
1
|
// src/utils/context.ts
|
|
24
2
|
var MODEL_CONTEXT_WINDOWS = {
|
|
25
3
|
// Anthropic (evergreen prefixes — also match dated variants via startsWith)
|
|
26
4
|
"claude-sonnet-4": 2e5,
|
|
27
5
|
"claude-opus-4": 2e5,
|
|
6
|
+
"claude-haiku-4": 2e5,
|
|
28
7
|
"claude-haiku-3-5": 2e5,
|
|
29
8
|
"claude-3-5-sonnet": 2e5,
|
|
30
9
|
"claude-3-5-haiku": 2e5,
|
|
10
|
+
// Bedrock / Vertex model ID patterns (prefix-matched)
|
|
11
|
+
"us.anthropic.claude": 2e5,
|
|
12
|
+
"eu.anthropic.claude": 2e5,
|
|
13
|
+
"ap.anthropic.claude": 2e5,
|
|
14
|
+
"anthropic.claude": 2e5,
|
|
31
15
|
// OpenAI
|
|
16
|
+
"gpt-4.1": 1047576,
|
|
17
|
+
"gpt-4.1-mini": 1047576,
|
|
18
|
+
"gpt-4.1-nano": 1047576,
|
|
32
19
|
"gpt-4o": 128e3,
|
|
33
20
|
"gpt-4o-mini": 128e3,
|
|
34
21
|
"gpt-4-turbo": 128e3,
|
|
@@ -47,11 +34,21 @@ var MODEL_CONTEXT_WINDOWS = {
|
|
|
47
34
|
"gemini-1.5-flash": 1048576
|
|
48
35
|
};
|
|
49
36
|
var MODEL_MAX_OUTPUT_TOKENS = {
|
|
50
|
-
"claude-sonnet-4":
|
|
51
|
-
"claude-opus-4":
|
|
37
|
+
"claude-sonnet-4-6": 128e3,
|
|
38
|
+
"claude-opus-4-6": 128e3,
|
|
39
|
+
"claude-sonnet-4": 64e3,
|
|
40
|
+
"claude-opus-4": 64e3,
|
|
41
|
+
"claude-haiku-4": 64e3,
|
|
52
42
|
"claude-haiku-3-5": 8192,
|
|
53
43
|
"claude-3-5-sonnet": 8192,
|
|
54
44
|
"claude-3-5-haiku": 8192,
|
|
45
|
+
"us.anthropic.claude": 64e3,
|
|
46
|
+
"eu.anthropic.claude": 64e3,
|
|
47
|
+
"ap.anthropic.claude": 64e3,
|
|
48
|
+
"anthropic.claude": 64e3,
|
|
49
|
+
"gpt-4.1": 32768,
|
|
50
|
+
"gpt-4.1-mini": 32768,
|
|
51
|
+
"gpt-4.1-nano": 32768,
|
|
55
52
|
"gpt-4o": 16384,
|
|
56
53
|
"gpt-4o-mini": 16384,
|
|
57
54
|
"gpt-4-turbo": 4096,
|
|
@@ -100,13 +97,35 @@ function getMaxOutputTokensForModel(model) {
|
|
|
100
97
|
return DEFAULT_MAX_OUTPUT_TOKENS;
|
|
101
98
|
}
|
|
102
99
|
|
|
100
|
+
// src/providers/cache.ts
|
|
101
|
+
function sortToolDefinitionsForCache(tools, mcpToolNames) {
|
|
102
|
+
const byName = (a, b) => a.function.name.localeCompare(b.function.name);
|
|
103
|
+
if (!mcpToolNames || mcpToolNames.size === 0) {
|
|
104
|
+
return [...tools].sort(byName);
|
|
105
|
+
}
|
|
106
|
+
const builtIn = [];
|
|
107
|
+
const mcp = [];
|
|
108
|
+
for (const t of tools) {
|
|
109
|
+
if (mcpToolNames.has(t.function.name)) {
|
|
110
|
+
mcp.push(t);
|
|
111
|
+
} else {
|
|
112
|
+
builtIn.push(t);
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
return [...builtIn.sort(byName), ...mcp.sort(byName)];
|
|
116
|
+
}
|
|
117
|
+
function getMessageCacheBreakpointIndex(messages, skipCacheWrite) {
|
|
118
|
+
if (messages.length === 0) return -1;
|
|
119
|
+
return skipCacheWrite && messages.length >= 2 ? messages.length - 2 : messages.length - 1;
|
|
120
|
+
}
|
|
121
|
+
|
|
103
122
|
export {
|
|
104
|
-
sortToolDefinitionsForCache,
|
|
105
|
-
getMessageCacheBreakpointIndex,
|
|
106
123
|
registerContextWindows,
|
|
107
124
|
getContextWindowForModel,
|
|
108
125
|
getEffectiveContextWindow,
|
|
109
126
|
getAutoCompactThreshold,
|
|
110
|
-
getMaxOutputTokensForModel
|
|
127
|
+
getMaxOutputTokensForModel,
|
|
128
|
+
sortToolDefinitionsForCache,
|
|
129
|
+
getMessageCacheBreakpointIndex
|
|
111
130
|
};
|
|
112
|
-
//# sourceMappingURL=chunk-
|
|
131
|
+
//# sourceMappingURL=chunk-HEQQQGK5.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/utils/context.ts","../src/providers/cache.ts"],"sourcesContent":["/**\n * Model context window sizes and effective window calculations.\n */\n\nconst MODEL_CONTEXT_WINDOWS: Record<string, number> = {\n // Anthropic (evergreen prefixes — also match dated variants via startsWith)\n \"claude-sonnet-4\": 200_000,\n \"claude-opus-4\": 200_000,\n \"claude-haiku-4\": 200_000,\n \"claude-haiku-3-5\": 200_000,\n \"claude-3-5-sonnet\": 200_000,\n \"claude-3-5-haiku\": 200_000,\n // Bedrock / Vertex model ID patterns (prefix-matched)\n \"us.anthropic.claude\": 200_000,\n \"eu.anthropic.claude\": 200_000,\n \"ap.anthropic.claude\": 200_000,\n \"anthropic.claude\": 200_000,\n // OpenAI\n \"gpt-4.1\": 1_047_576,\n \"gpt-4.1-mini\": 1_047_576,\n \"gpt-4.1-nano\": 1_047_576,\n \"gpt-4o\": 128_000,\n \"gpt-4o-mini\": 128_000,\n \"gpt-4-turbo\": 128_000,\n \"gpt-4\": 8_192,\n \"o1\": 200_000,\n \"o1-mini\": 128_000,\n \"o1-preview\": 128_000,\n \"o3\": 200_000,\n \"o3-mini\": 200_000,\n \"o4-mini\": 200_000,\n // Google\n \"gemini-2.5-pro\": 1_048_576,\n \"gemini-2.5-flash\": 1_048_576,\n \"gemini-2.0-flash\": 1_048_576,\n \"gemini-1.5-pro\": 2_097_152,\n \"gemini-1.5-flash\": 1_048_576,\n};\n\nconst MODEL_MAX_OUTPUT_TOKENS: Record<string, number> = {\n \"claude-sonnet-4-6\": 128_000,\n \"claude-opus-4-6\": 128_000,\n \"claude-sonnet-4\": 64_000,\n \"claude-opus-4\": 64_000,\n \"claude-haiku-4\": 64_000,\n \"claude-haiku-3-5\": 8_192,\n \"claude-3-5-sonnet\": 8_192,\n \"claude-3-5-haiku\": 8_192,\n \"us.anthropic.claude\": 64_000,\n \"eu.anthropic.claude\": 64_000,\n \"ap.anthropic.claude\": 64_000,\n \"anthropic.claude\": 64_000,\n \"gpt-4.1\": 32_768,\n \"gpt-4.1-mini\": 32_768,\n \"gpt-4.1-nano\": 32_768,\n \"gpt-4o\": 16_384,\n \"gpt-4o-mini\": 16_384,\n \"gpt-4-turbo\": 4_096,\n \"o1\": 100_000,\n \"o3\": 100_000,\n \"o3-mini\": 100_000,\n \"o4-mini\": 100_000,\n \"gemini-2.5-pro\": 65_536,\n \"gemini-2.5-flash\": 65_536,\n \"gemini-2.0-flash\": 
8_192,\n};\n\nconst DEFAULT_MAX_OUTPUT_TOKENS = 16_384;\nconst DEFAULT_CONTEXT_WINDOW = 128_000;\nconst AUTOCOMPACT_BUFFER_TOKENS = 13_000;\nconst MAX_OUTPUT_RESERVE = 20_000;\n\nlet customWindows: Record<string, number> = {};\n\n/**\n * Register custom context window sizes for models not in the built-in table.\n */\nexport function registerContextWindows(\n windows: Record<string, number>,\n): void {\n customWindows = { ...customWindows, ...windows };\n}\n\n/**\n * Get the context window size for a model. Checks custom overrides first,\n * then built-in table, then prefix-matches, then falls back to default.\n */\nexport function getContextWindowForModel(model: string): number {\n if (customWindows[model] !== undefined) return customWindows[model];\n if (MODEL_CONTEXT_WINDOWS[model] !== undefined)\n return MODEL_CONTEXT_WINDOWS[model];\n\n for (const [prefix, size] of Object.entries(MODEL_CONTEXT_WINDOWS)) {\n if (model.startsWith(prefix)) return size;\n }\n for (const [prefix, size] of Object.entries(customWindows)) {\n if (model.startsWith(prefix)) return size;\n }\n\n return DEFAULT_CONTEXT_WINDOW;\n}\n\n/**\n * Effective context window = total window minus space reserved for the\n * model's output during a compaction/summary request.\n */\nexport function getEffectiveContextWindow(\n model: string,\n maxOutputTokens?: number,\n): number {\n const window = getContextWindowForModel(model);\n const reserve = Math.min(maxOutputTokens ?? MAX_OUTPUT_RESERVE, MAX_OUTPUT_RESERVE);\n return window - reserve;\n}\n\n/**\n * Auto-compact threshold: effective window minus a buffer to ensure we\n * compact before we're at the hard limit.\n */\nexport function getAutoCompactThreshold(\n model: string,\n maxOutputTokens?: number,\n): number {\n return getEffectiveContextWindow(model, maxOutputTokens) - AUTOCOMPACT_BUFFER_TOKENS;\n}\n\n/**\n * Get the maximum output tokens a model supports. 
Used to clamp\n * max_tokens when extended thinking budgets are added.\n */\nexport function getMaxOutputTokensForModel(model: string): number {\n if (MODEL_MAX_OUTPUT_TOKENS[model] !== undefined)\n return MODEL_MAX_OUTPUT_TOKENS[model];\n for (const [prefix, size] of Object.entries(MODEL_MAX_OUTPUT_TOKENS)) {\n if (model.startsWith(prefix)) return size;\n }\n return DEFAULT_MAX_OUTPUT_TOKENS;\n}\n","/**\n * Provider-agnostic prompt caching utilities.\n *\n * Stable tool ordering prevents cache invalidation when the tool set is\n * unchanged. The breakpoint index helper determines which message gets a\n * single cache_control marker per request (matching claude-code's strategy).\n */\n\nimport type { ToolDefinition } from \"./types.js\";\nimport type { ChatMessage } from \"../session/types.js\";\n\nexport type CacheScope = \"global\" | \"org\";\n\nexport interface CacheControlConfig {\n enabled: boolean;\n /** TTL for cached content. When set, produces `ttl: '1h'` in cache_control. */\n ttl?: \"1h\";\n /** Scope for shared cache across sessions/orgs. */\n scope?: CacheScope;\n}\n\n/**\n * Sort tool definitions deterministically for prompt cache stability.\n *\n * Strategy (matching claude-code's assembleToolPool): built-in tools form a\n * contiguous prefix sorted by name, followed by MCP/external tools sorted by\n * name. Tools with `mcpInfo` on the original Tool object are treated as MCP;\n * everything else is built-in. 
Since ToolDefinition doesn't carry mcpInfo,\n * callers can pass an optional set of MCP tool names to partition correctly.\n */\nexport function sortToolDefinitionsForCache(\n tools: ToolDefinition[],\n mcpToolNames?: ReadonlySet<string>,\n): ToolDefinition[] {\n const byName = (a: ToolDefinition, b: ToolDefinition) =>\n a.function.name.localeCompare(b.function.name);\n\n if (!mcpToolNames || mcpToolNames.size === 0) {\n return [...tools].sort(byName);\n }\n\n const builtIn: ToolDefinition[] = [];\n const mcp: ToolDefinition[] = [];\n\n for (const t of tools) {\n if (mcpToolNames.has(t.function.name)) {\n mcp.push(t);\n } else {\n builtIn.push(t);\n }\n }\n\n return [...builtIn.sort(byName), ...mcp.sort(byName)];\n}\n\n/**\n * Determine which message index should receive the cache_control breakpoint.\n *\n * Exactly one message per request is marked. Normally the last message;\n * for forked agents with skipCacheWrite the second-to-last so the fork\n * doesn't write its own tail into the cache.\n */\nexport function getMessageCacheBreakpointIndex(\n messages: ChatMessage[],\n skipCacheWrite?: boolean,\n): number {\n if (messages.length === 0) return -1;\n return skipCacheWrite && messages.length >= 2\n ? 
messages.length - 2\n : messages.length - 1;\n}\n"],"mappings":";AAIA,IAAM,wBAAgD;AAAA;AAAA,EAEpD,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA;AAAA,EAEpB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EACvB,oBAAoB;AAAA;AAAA,EAEpB,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,UAAU;AAAA,EACV,eAAe;AAAA,EACf,eAAe;AAAA,EACf,SAAS;AAAA,EACT,MAAM;AAAA,EACN,WAAW;AAAA,EACX,cAAc;AAAA,EACd,MAAM;AAAA,EACN,WAAW;AAAA,EACX,WAAW;AAAA;AAAA,EAEX,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,oBAAoB;AAAA,EACpB,kBAAkB;AAAA,EAClB,oBAAoB;AACtB;AAEA,IAAM,0BAAkD;AAAA,EACtD,qBAAqB;AAAA,EACrB,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA,EACpB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EACvB,oBAAoB;AAAA,EACpB,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,UAAU;AAAA,EACV,eAAe;AAAA,EACf,eAAe;AAAA,EACf,MAAM;AAAA,EACN,MAAM;AAAA,EACN,WAAW;AAAA,EACX,WAAW;AAAA,EACX,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,oBAAoB;AACtB;AAEA,IAAM,4BAA4B;AAClC,IAAM,yBAAyB;AAC/B,IAAM,4BAA4B;AAClC,IAAM,qBAAqB;AAE3B,IAAI,gBAAwC,CAAC;AAKtC,SAAS,uBACd,SACM;AACN,kBAAgB,EAAE,GAAG,eAAe,GAAG,QAAQ;AACjD;AAMO,SAAS,yBAAyB,OAAuB;AAC9D,MAAI,cAAc,KAAK,MAAM,OAAW,QAAO,cAAc,KAAK;AAClE,MAAI,sBAAsB,KAAK,MAAM;AACnC,WAAO,sBAAsB,KAAK;AAEpC,aAAW,CAAC,QAAQ,IAAI,KAAK,OAAO,QAAQ,qBAAqB,GAAG;AAClE,QAAI,MAAM,WAAW,MAAM,EAAG,QAAO;AAAA,EACvC;AACA,aAAW,CAAC,QAAQ,IAAI,KAAK,OAAO,QAAQ,aAAa,GAAG;AAC1D,QAAI,MAAM,WAAW,MAAM,EAAG,QAAO;AAAA,EACvC;AAEA,SAAO;AACT;AAMO,SAAS,0BACd,OACA,iBACQ;AACR,QAAM,SAAS,yBAAyB,KAAK;AAC7C,QAAM,UAAU,KAAK,IAAI,mBAAmB,oBAAoB,kBAAkB;AAClF,SAAO,SAAS;AAClB;AAMO,SAAS,wBACd,OACA,iBACQ;AACR,SAAO,0BAA0B,OAAO,eAAe,IAAI;AAC7D;AAMO,SAAS,2BAA2B,OAAuB;AAChE,MAAI,wBAAwB,KAAK,MAAM;AACrC,WAAO,wBAAwB,KAAK;AACtC,aAAW,CAAC,QAAQ,IAAI,KAAK,OAAO,QAAQ,uBAAuB,GAAG;AACpE,QAAI,MAAM,WAAW,MAAM,EAAG,QAAO;AAAA,EACvC;AACA,SAAO;AACT;;;AC3GO,SAAS,4BACd,OACA,cACkB;AAClB,QAAM,SAAS,CAAC,GAAmB,MACjC,EAAE,SAAS,KAAK,cAAc,EAAE,SAAS,IAAI;AAE/C,
MAAI,CAAC,gBAAgB,aAAa,SAAS,GAAG;AAC5C,WAAO,CAAC,GAAG,KAAK,EAAE,KAAK,MAAM;AAAA,EAC/B;AAEA,QAAM,UAA4B,CAAC;AACnC,QAAM,MAAwB,CAAC;AAE/B,aAAW,KAAK,OAAO;AACrB,QAAI,aAAa,IAAI,EAAE,SAAS,IAAI,GAAG;AACrC,UAAI,KAAK,CAAC;AAAA,IACZ,OAAO;AACL,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,CAAC,GAAG,QAAQ,KAAK,MAAM,GAAG,GAAG,IAAI,KAAK,MAAM,CAAC;AACtD;AASO,SAAS,+BACd,UACA,gBACQ;AACR,MAAI,SAAS,WAAW,EAAG,QAAO;AAClC,SAAO,kBAAkB,SAAS,UAAU,IACxC,SAAS,SAAS,IAClB,SAAS,SAAS;AACxB;","names":[]}
|