zidane 4.0.2 → 4.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +196 -614
- package/dist/agent-BoV5Twdl.d.ts +2347 -0
- package/dist/agent-BoV5Twdl.d.ts.map +1 -0
- package/dist/contexts-3Arvn7yR.js +321 -0
- package/dist/contexts-3Arvn7yR.js.map +1 -0
- package/dist/contexts.d.ts +2 -25
- package/dist/contexts.js +2 -10
- package/dist/errors-D1lhd6mX.js +118 -0
- package/dist/errors-D1lhd6mX.js.map +1 -0
- package/dist/index-28otmfLX.d.ts +400 -0
- package/dist/index-28otmfLX.d.ts.map +1 -0
- package/dist/index-BfSdALzk.d.ts +113 -0
- package/dist/index-BfSdALzk.d.ts.map +1 -0
- package/dist/index-DPsd0qwm.d.ts +254 -0
- package/dist/index-DPsd0qwm.d.ts.map +1 -0
- package/dist/index.d.ts +5 -95
- package/dist/index.js +141 -271
- package/dist/index.js.map +1 -0
- package/dist/interpolate-CukJwP2G.js +887 -0
- package/dist/interpolate-CukJwP2G.js.map +1 -0
- package/dist/mcp-8wClKY-3.js +771 -0
- package/dist/mcp-8wClKY-3.js.map +1 -0
- package/dist/mcp.d.ts +2 -4
- package/dist/mcp.js +2 -13
- package/dist/messages-z5Pq20p7.js +1020 -0
- package/dist/messages-z5Pq20p7.js.map +1 -0
- package/dist/presets-Cs7_CsMk.js +39 -0
- package/dist/presets-Cs7_CsMk.js.map +1 -0
- package/dist/presets.d.ts +2 -43
- package/dist/presets.js +2 -17
- package/dist/providers-CX-R-Oy-.js +969 -0
- package/dist/providers-CX-R-Oy-.js.map +1 -0
- package/dist/providers.d.ts +2 -4
- package/dist/providers.js +3 -23
- package/dist/session/sqlite.d.ts +7 -12
- package/dist/session/sqlite.d.ts.map +1 -0
- package/dist/session/sqlite.js +67 -79
- package/dist/session/sqlite.js.map +1 -0
- package/dist/session-Cn68UASv.js +440 -0
- package/dist/session-Cn68UASv.js.map +1 -0
- package/dist/session.d.ts +2 -4
- package/dist/session.js +3 -27
- package/dist/skills.d.ts +3 -322
- package/dist/skills.js +24 -47
- package/dist/skills.js.map +1 -0
- package/dist/stats-DoKUtF5T.js +58 -0
- package/dist/stats-DoKUtF5T.js.map +1 -0
- package/dist/tools-DpeWKzP1.js +3941 -0
- package/dist/tools-DpeWKzP1.js.map +1 -0
- package/dist/tools.d.ts +3 -95
- package/dist/tools.js +2 -40
- package/dist/tui.d.ts +533 -0
- package/dist/tui.d.ts.map +1 -0
- package/dist/tui.js +2004 -0
- package/dist/tui.js.map +1 -0
- package/dist/types-Bx_F8jet.js +39 -0
- package/dist/types-Bx_F8jet.js.map +1 -0
- package/dist/types.d.ts +4 -55
- package/dist/types.js +4 -28
- package/package.json +38 -4
- package/dist/agent-BAHrGtqu.d.ts +0 -2425
- package/dist/chunk-4ILGBQ23.js +0 -803
- package/dist/chunk-4LPBN547.js +0 -3540
- package/dist/chunk-64LLNY7F.js +0 -28
- package/dist/chunk-6STZTA4N.js +0 -830
- package/dist/chunk-7GQ7P6DM.js +0 -566
- package/dist/chunk-IC7FT4OD.js +0 -37
- package/dist/chunk-JCOB6IYO.js +0 -22
- package/dist/chunk-JH6IAAFA.js +0 -28
- package/dist/chunk-LNN5UTS2.js +0 -97
- package/dist/chunk-PMCQOMV4.js +0 -490
- package/dist/chunk-UD25QF3H.js +0 -304
- package/dist/chunk-W57VY6DJ.js +0 -834
- package/dist/sandbox-D7v6Wy62.d.ts +0 -28
- package/dist/skills-use-DwZrNmcw.d.ts +0 -80
- package/dist/types-Bai5rKpa.d.ts +0 -89
- package/dist/validation-Pm--dQEU.d.ts +0 -185
package/dist/chunk-W57VY6DJ.js
DELETED
|
@@ -1,834 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
assistantMessage,
|
|
3
|
-
fromAnthropic,
|
|
4
|
-
openaiCompat,
|
|
5
|
-
toAnthropic,
|
|
6
|
-
toolResultsMessage,
|
|
7
|
-
userMessage
|
|
8
|
-
} from "./chunk-4ILGBQ23.js";
|
|
9
|
-
import {
|
|
10
|
-
matchesContextExceeded
|
|
11
|
-
} from "./chunk-LNN5UTS2.js";
|
|
12
|
-
|
|
13
|
-
// src/providers/oauth.ts
|
|
14
|
-
import { existsSync, readFileSync, renameSync, writeFileSync } from "fs";
|
|
15
|
-
import { resolve } from "path";
|
|
16
|
-
import { getOAuthApiKey } from "@mariozechner/pi-ai/oauth";
|
|
17
|
-
// Absolute path of the on-disk OAuth credential store, resolved against the
// current working directory.
function credentialsFilePath() {
  const workingDir = process.cwd();
  return resolve(workingDir, ".credentials.json");
}
|
|
20
|
-
// File mode for the credential store: owner read/write only. Written as octal
// for readability; 0o600 === 384, the value previously hard-coded in decimal.
var CREDENTIALS_FILE_MODE = 0o600;
// In-flight token-refresh promises keyed by credential source ("params:<id>" /
// "file:<id>"), so concurrent callers share one refresh instead of racing
// (see withRefreshLock).
var refreshLocks = /* @__PURE__ */ new Map();
|
|
22
|
-
// Load the credential store from disk. Returns {} when the file is missing,
// unreadable, malformed JSON, or not a plain object — callers never see an
// exception from here.
function readOAuthCredentials() {
  const storePath = credentialsFilePath();
  if (!existsSync(storePath)) {
    return {};
  }
  try {
    const parsed = JSON.parse(readFileSync(storePath, "utf-8"));
    const isPlainObject = Boolean(parsed) && typeof parsed === "object" && !Array.isArray(parsed);
    return isPlainObject ? parsed : {};
  } catch {
    return {};
  }
}
|
|
36
|
-
// Atomically persist the credential store: serialize to a unique temp file
// (pid + timestamp avoids collisions) with restrictive permissions, then
// rename over the real path so readers never observe a partial write.
function writeOAuthCredentials(credentials) {
  const target = credentialsFilePath();
  const scratch = `${target}.${process.pid}.${Date.now()}.tmp`;
  const body = JSON.stringify(credentials, null, 2);
  writeFileSync(scratch, body, { mode: CREDENTIALS_FILE_MODE });
  renameSync(scratch, target);
}
|
|
42
|
-
// Extract an OAuth credential triple (access / refresh / expires) from caller
// params, plus any defined values for the requested extra keys. Returns
// undefined unless all three core fields are present with the right types.
function credentialsFromParams(params, extraKeys = []) {
  const hasCore = typeof params?.access === "string" && typeof params.refresh === "string" && typeof params.expires === "number";
  if (!hasCore) {
    return void 0;
  }
  const credentials = {
    access: params.access,
    refresh: params.refresh,
    expires: params.expires
  };
  for (const key of extraKeys) {
    const value = params[key];
    if (value !== void 0) {
      credentials[key] = value;
    }
  }
  return credentials;
}
|
|
55
|
-
// Resolve an API key for an OAuth-capable provider, trying sources in order:
// 1. explicit `params.apiKey`;
// 2. a full credential triple in params (refreshed under a "params:" lock);
// 3. a bare `params.access` token (used as-is, no refresh);
// 4. the provider's environment variable;
// 5. the on-disk credential store (refreshed under a "file:" lock, persisting
//    any rotated credentials back to disk).
// Throws `options.missingError` when no source yields credentials, and wraps
// refresh failures via `options.refreshError(reason)`.
async function resolveOAuthApiKey(options, callbacks) {
  if (typeof options.params?.apiKey === "string")
    return options.params.apiKey;
  const paramsCredentials = credentialsFromParams(options.params, options.extraCredentialKeys);
  if (paramsCredentials) {
    // Param-supplied credentials are refreshed but never written to disk.
    return await withRefreshLock(
      `params:${options.providerId}`,
      () => resolveCredentialSource("params", paramsCredentials)
    );
  }
  if (typeof options.params?.access === "string")
    return options.params.access;
  if (options.envKey && process.env[options.envKey])
    return process.env[options.envKey];
  // I/O is injectable for tests; defaults hit the .credentials.json store.
  const readCredentials = options.readCredentials ?? readOAuthCredentials;
  const writeCredentials = options.writeCredentials ?? writeOAuthCredentials;
  return await withRefreshLock(`file:${options.providerId}`, async () => {
    const allCredentials = readCredentials();
    const storedCredentials = allCredentials[options.providerId];
    if (!storedCredentials)
      throw new Error(options.missingError);
    return await resolveCredentialSource("file", storedCredentials, allCredentials, writeCredentials);
  });
  // Hoisted inner helper (declared after the returns, reached via hoisting):
  // runs the refresh for one credential source, persists rotated credentials
  // for the "file" source, and notifies callbacks.onOAuthRefresh on rotation.
  async function resolveCredentialSource(source, current, allCredentials, persistCredentials) {
    try {
      const refreshOAuthApiKey = options.getOAuthApiKey ?? getOAuthApiKey;
      const result = await refreshOAuthApiKey(options.providerId, { [options.providerId]: current });
      if (!result)
        throw new Error(options.missingError);
      // Identity comparison: the refresher returns the same object when no
      // rotation happened, a new object when tokens changed.
      if (result.newCredentials !== current) {
        if (source === "file" && allCredentials && persistCredentials) {
          allCredentials[options.providerId] = result.newCredentials;
          persistCredentials(allCredentials);
        }
        // Copies are passed out so listeners cannot mutate our state.
        await callbacks?.onOAuthRefresh?.({
          provider: options.provider,
          providerId: options.providerId,
          source,
          previousCredentials: { ...current },
          credentials: { ...result.newCredentials }
        });
      }
      return result.apiKey;
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      throw new Error(options.refreshError(reason));
    }
  }
}
|
|
104
|
-
// De-duplicate concurrent refreshes: while a task for `key` is in flight,
// every caller awaits the same promise. The entry is removed in `finally`,
// so the lock clears on both success and failure.
async function withRefreshLock(key, fn) {
  const inFlight = refreshLocks.get(key);
  if (inFlight) {
    return inFlight;
  }
  const run = async () => {
    try {
      return await fn();
    } finally {
      refreshLocks.delete(key);
    }
  };
  const task = run();
  refreshLocks.set(key, task);
  return task;
}
|
|
118
|
-
|
|
119
|
-
// src/providers/anthropic.ts
|
|
120
|
-
// Cached constructor from the optional `@anthropic-ai/sdk` peer dependency;
// null until the first successful dynamic import.
var _sdkCtor = null;
// Lazily import the Anthropic SDK, memoizing its default export. Raises a
// descriptive error (with the original failure as `cause`) when the optional
// peer dependency is not installed.
async function loadAnthropicSdk() {
  if (_sdkCtor) {
    return _sdkCtor;
  }
  let mod;
  try {
    mod = await import("@anthropic-ai/sdk");
  } catch (err) {
    throw new Error(
      "The `anthropic` provider requires the `@anthropic-ai/sdk` package, which is an optional peer dependency. Install it with your package manager (e.g. `bun add @anthropic-ai/sdk`).",
      err instanceof Error ? { cause: err } : void 0
    );
  }
  _sdkCtor = mod.default;
  return _sdkCtor;
}
|
|
135
|
-
var OAUTH_DEFAULT_BETAS = ["claude-code-20250219", "oauth-2025-04-20"];
// Build the `anthropic-beta` header value: OAuth default betas first (when
// OAuth is active), then caller extras, de-duplicated in first-seen order.
// Blank/non-string extras are dropped. Returns undefined when nothing remains.
function resolveAnthropicBetas(isOAuth, extraBetas) {
  const ordered = [];
  const known = new Set();
  const add = (beta) => {
    if (!known.has(beta)) {
      known.add(beta);
      ordered.push(beta);
    }
  };
  if (isOAuth) {
    OAUTH_DEFAULT_BETAS.forEach(add);
  }
  if (extraBetas) {
    for (const beta of extraBetas) {
      if (typeof beta === "string" && beta.length > 0) {
        add(beta);
      }
    }
  }
  return ordered.length > 0 ? ordered.join(",") : void 0;
}
|
|
157
|
-
// Resolve the Anthropic API key at provider-construction time, trying in
// order: explicit apiKey param, access-token param, ANTHROPIC_API_KEY env
// var, then the on-disk OAuth store. Throws when nothing is configured.
function getConfiguredApiKey(anthropicParams) {
  const direct = anthropicParams?.apiKey || anthropicParams?.access || process.env.ANTHROPIC_API_KEY;
  if (direct) {
    return direct;
  }
  const stored = readOAuthCredentials().anthropic?.access;
  if (typeof stored === "string" && stored.length > 0) {
    return stored;
  }
  throw new Error("No API key found. Run `bun run auth` first.");
}
|
|
169
|
-
// Construct an Anthropic SDK client. OAuth tokens go through `authToken`
// with Claude-Code-style headers; plain API keys go through `apiKey`. The
// beta header is attached in either mode when any betas resolve.
function createClient(SDK, apiKey, isOAuth, baseURL, extraBetas) {
  const baseOptions = baseURL ? { baseURL } : {};
  const betaHeader = resolveAnthropicBetas(isOAuth, extraBetas);
  if (!isOAuth) {
    const headerOptions = betaHeader ? { defaultHeaders: { "anthropic-beta": betaHeader } } : {};
    return new SDK({
      apiKey,
      ...headerOptions,
      ...baseOptions
    });
  }
  const oauthHeaders = {
    "anthropic-dangerous-direct-browser-access": "true",
    "user-agent": "zidane/2.0.0",
    "x-app": "cli"
  };
  if (betaHeader) {
    oauthHeaders["anthropic-beta"] = betaHeader;
  }
  return new SDK({
    apiKey: null,
    authToken: apiKey,
    dangerouslyAllowBrowser: true,
    defaultHeaders: oauthHeaders,
    ...baseOptions
  });
}
|
|
195
|
-
// Thinking level → adaptive-effort mapping ("minimal" collapses to "low").
var EFFORT_FOR_LEVEL = {
  minimal: "low",
  low: "low",
  medium: "medium",
  high: "high"
};
// Decide how to configure Anthropic extended thinking for a request.
// "off" → null (no thinking); "adaptive" → adaptive mode, optionally capping
// max_tokens at a positive custom budget; any named level with an explicit
// budget → fixed-budget mode (bumping max_tokens by the budget); a named
// level alone → adaptive mode with the mapped effort.
function planAnthropicThinking(level, customBudget) {
  if (level === "off") {
    return null;
  }
  if (level === "adaptive") {
    const hasCap = typeof customBudget === "number" && customBudget > 0;
    return hasCap ? { kind: "adaptive", maxTokensCap: customBudget } : { kind: "adaptive" };
  }
  if (customBudget === void 0) {
    return { kind: "adaptive", effort: EFFORT_FOR_LEVEL[level] };
  }
  return { kind: "enabled", budgetTokens: customBudget, maxTokensBump: customBudget };
}
|
|
214
|
-
// Translate an Anthropic stop_reason into the internal finish-reason
// vocabulary; undefined for a missing reason, "other" for unknown values.
function mapStopReason(stopReason) {
  if (!stopReason) {
    return void 0;
  }
  const table = {
    end_turn: "stop",
    stop_sequence: "stop",
    tool_use: "tool-calls",
    max_tokens: "length",
    model_context_window_exceeded: "length",
    refusal: "content-filter",
    // 4.6+: server-side mid-turn pause for long thinking. The loop
    // continues with a synthetic continue message rather than terminating.
    pause_turn: "pause"
  };
  // hasOwn guard keeps prototype names ("toString", …) mapping to "other".
  return Object.hasOwn(table, stopReason) ? table[stopReason] : "other";
}
|
|
236
|
-
// Shared cache_control marker object reused on every breakpoint.
var EPHEMERAL = { type: "ephemeral" };
// Place Anthropic prompt-cache breakpoints on a request (mutates `params`):
// the last system block, the last tool definition, and the last content block
// of the final message each get `cache_control: ephemeral`. For the final
// message, trailing thinking blocks are skipped when picking the target.
// A string system/content is first promoted to a one-element text block array.
function applyAnthropicCacheBreakpoints(params) {
  const markLast = (entries) => entries.map((entry, i) => i === entries.length - 1 ? { ...entry, cache_control: EPHEMERAL } : entry);
  if (typeof params.system === "string") {
    if (params.system.length > 0) {
      params.system = [{ type: "text", text: params.system, cache_control: EPHEMERAL }];
    }
  } else if (Array.isArray(params.system) && params.system.length > 0) {
    params.system = markLast(params.system);
  }
  if (params.tools && params.tools.length > 0) {
    params.tools = markLast(params.tools);
  }
  if (params.messages.length === 0) {
    return;
  }
  const lastIdx = params.messages.length - 1;
  const lastMsg = params.messages[lastIdx];
  if (typeof lastMsg.content === "string") {
    if (lastMsg.content.length === 0) {
      return;
    }
    params.messages[lastIdx] = {
      ...lastMsg,
      content: [{ type: "text", text: lastMsg.content, cache_control: EPHEMERAL }]
    };
    return;
  }
  if (!Array.isArray(lastMsg.content) || lastMsg.content.length === 0) {
    return;
  }
  // Walk back over trailing thinking blocks; if everything is thinking,
  // leave the message untouched.
  let target = lastMsg.content.length - 1;
  while (target >= 0 && isThinkingBlock(lastMsg.content[target])) {
    target -= 1;
  }
  if (target < 0) {
    return;
  }
  const nextContent = lastMsg.content.slice();
  nextContent[target] = { ...nextContent[target], cache_control: EPHEMERAL };
  params.messages[lastIdx] = { ...lastMsg, content: nextContent };
}
// True for blocks that are skipped when placing cache breakpoints.
function isThinkingBlock(block) {
  return block.type === "thinking" || block.type === "redacted_thinking";
}
|
|
282
|
-
// Duck-type check for Anthropic SDK API-error shapes: an object carrying a
// numeric `status` plus an `error` property.
function looksLikeAnthropicApiError(err) {
  const isObject = Boolean(err) && typeof err === "object";
  if (!isObject) {
    return false;
  }
  return typeof err.status === "number" && "error" in err;
}
|
|
288
|
-
// Classify an error thrown by the Anthropic SDK into the internal error
// taxonomy: aborted, context_exceeded, provider_error, or null (unrecognized).
// Retryability is derived from HTTP status: 429 or 5xx except 501.
function classifyAnthropicError(err) {
  if (!err || typeof err !== "object") {
    return null;
  }
  const anyErr = err;
  if (anyErr.name === "AbortError") {
    return { kind: "aborted" };
  }
  if (!looksLikeAnthropicApiError(err)) {
    return null;
  }
  // Prefer the nested error type unless it is the generic "error" wrapper.
  const innerType = anyErr.error?.error?.type;
  const outerType = anyErr.error?.type;
  const nativeType = innerType && innerType !== "error" ? innerType : outerType;
  const message = anyErr.error?.error?.message ?? anyErr.error?.message ?? anyErr.message ?? "";
  if (matchesContextExceeded(message)) {
    return {
      kind: "context_exceeded",
      providerCode: nativeType ?? "invalid_request_error",
      message
    };
  }
  const status = anyErr.status;
  let retryable;
  if (typeof status === "number") {
    retryable = status === 429 || (status >= 500 && status !== 501);
  }
  const classified = {
    kind: "provider_error",
    providerCode: nativeType ?? (status ? String(status) : void 0),
    message
  };
  if (retryable !== void 0) {
    classified.retryable = retryable;
  }
  return classified;
}
|
|
316
|
-
// Build one Anthropic user message from generic prompt parts: text parts
// pass through (empty ones are dropped), images become image blocks, and any
// other part is serialized into an <attachment> text envelope — verbatim for
// text-encoded parts, marked encoding="base64" otherwise.
function anthropicPromptMessage(parts) {
  const content = [];
  const attachmentHeader = (part, base64) => {
    const enc = base64 ? ' encoding="base64"' : "";
    return part.name
      ? `<attachment name="${part.name}" media_type="${part.mediaType}"${enc}>`
      : `<attachment media_type="${part.mediaType}"${enc}>`;
  };
  for (const part of parts) {
    if (part.type === "text") {
      if (part.text.length > 0) {
        content.push({ type: "text", text: part.text });
      }
    } else if (part.type === "image") {
      content.push({ type: "image", mediaType: part.mediaType, data: part.data });
    } else {
      const base64 = part.encoding !== "text";
      const header = attachmentHeader(part, base64);
      content.push({ type: "text", text: `${header}\n${part.data}\n</attachment>` });
    }
  }
  return { role: "user", content };
}
|
|
342
|
-
// Provider factory for Anthropic. Resolves the configured key eagerly (throws
// if none), detects OAuth by the "sk-ant-oat" token prefix, and returns the
// provider object with message-shaping helpers plus a streaming entry point.
function anthropic(anthropicParams) {
  const configuredApiKey = getConfiguredApiKey(anthropicParams);
  const isOAuth = configuredApiKey.includes("sk-ant-oat");
  const defaultModel = anthropicParams?.defaultModel || "claude-opus-4-6";
  // Param-supplied OAuth triple, kept in closure state and replaced when a
  // refresh rotates it (see onOAuthRefresh below).
  let runtimeCredentials = typeof anthropicParams?.access === "string" && typeof anthropicParams.refresh === "string" && typeof anthropicParams.expires === "number" ? {
    access: anthropicParams.access,
    refresh: anthropicParams.refresh,
    expires: anthropicParams.expires
  } : void 0;
  return {
    name: "anthropic",
    meta: {
      defaultModel,
      isOAuth,
      capabilities: {
        vision: true,
        imageInToolResult: true
      }
    },
    // Anthropic expects `input_schema` for tool parameter schemas.
    formatTools(tools) {
      return tools.map((t) => ({
        name: t.name,
        description: t.description,
        input_schema: t.inputSchema
      }));
    },
    userMessage(content) {
      return { role: "user", content: [{ type: "text", text: content }] };
    },
    assistantMessage(content) {
      return { role: "assistant", content: [{ type: "text", text: content }] };
    },
    toolResultsMessage(results) {
      return {
        role: "user",
        content: results.map((r) => ({
          type: "tool_result",
          callId: r.id,
          output: r.content
        }))
      };
    },
    promptMessage: anthropicPromptMessage,
    classifyError: classifyAnthropicError,
    // One model turn: resolve/refresh the key, build the request, stream the
    // response, and surface text/thinking deltas through `callbacks`.
    async stream(options, callbacks) {
      const SDK = await loadAnthropicSdk();
      const apiKey = await resolveOAuthApiKey(
        {
          provider: "anthropic",
          providerId: "anthropic",
          params: runtimeCredentials ? { ...anthropicParams, ...runtimeCredentials } : anthropicParams,
          envKey: "ANTHROPIC_API_KEY",
          missingError: "No API key found. Run `bun run auth` first.",
          refreshError: (reason) => `Anthropic OAuth token refresh failed. Run \`bun run auth --anthropic\` again. ${reason}`
        },
        {
          ...callbacks,
          // Capture rotated param-credentials locally before forwarding the
          // notification to the caller's handler.
          async onOAuthRefresh(ctx) {
            if (ctx.source === "params") {
              runtimeCredentials = {
                access: ctx.credentials.access,
                refresh: ctx.credentials.refresh,
                expires: ctx.credentials.expires
              };
            }
            await callbacks.onOAuthRefresh?.(ctx);
          }
        }
      );
      const client = createClient(
        SDK,
        apiKey,
        // Re-check the prefix: the resolved key may differ from the one seen
        // at construction time (env var, refreshed token, …).
        apiKey.includes("sk-ant-oat"),
        anthropicParams?.baseURL,
        anthropicParams?.extraBetas
      );
      // OAuth sessions pin the Claude Code system prompt; the caller's system
      // prompt is smuggled in as a leading user/assistant exchange instead.
      const system = isOAuth ? `You are Claude Code, Anthropic's official CLI for Claude.` : options.system;
      const messages = isOAuth && options.system ? [
        { role: "user", content: [{ type: "text", text: options.system }] },
        { role: "assistant", content: [{ type: "text", text: "Understood. I will proceed with these instructions above the rest of my system prompt." }] },
        ...options.messages
      ] : [...options.messages];
      const thinking = options.thinking ?? "off";
      const modelId = options.model;
      const params = {
        // Forward-compat escape hatch for un-typed beta fields. Spread first so
        // the typed core (model / max_tokens / system / tools / messages /
        // stream) and the explicit `context_management` below override on
        // collision — explicit always wins.
        ...anthropicParams?.extraBodyParams ?? {},
        model: modelId,
        max_tokens: options.maxTokens,
        system,
        tools: options.tools,
        messages: messages.map((m) => toAnthropic(m)),
        stream: true
      };
      if (anthropicParams?.contextManagement) {
        ;
        params.context_management = anthropicParams.contextManagement;
      }
      if (options.cache !== false)
        applyAnthropicCacheBreakpoints(params);
      const plan = planAnthropicThinking(thinking, options.thinkingBudget);
      if (plan) {
        if (plan.kind === "enabled") {
          params.thinking = { type: "enabled", budget_tokens: plan.budgetTokens };
          // Fixed budgets come out of max_tokens, so bump it by the budget.
          params.max_tokens = plan.maxTokensBump + params.max_tokens;
        } else {
          params.thinking = { type: "adaptive" };
          if (plan.effort)
            params.output_config = { effort: plan.effort };
          if (typeof plan.maxTokensCap === "number" && plan.maxTokensCap > 0)
            params.max_tokens = Math.min(params.max_tokens, plan.maxTokensCap);
        }
        // NOTE(review): temperature forced to 1 whenever thinking is on —
        // presumably required by the API for thinking mode; confirm.
        params.temperature = 1;
      }
      if (options.toolChoice) {
        if (options.toolChoice.type === "tool" && options.toolChoice.name)
          params.tool_choice = { type: "tool", name: options.toolChoice.name };
        else if (options.toolChoice.type === "required")
          params.tool_choice = { type: "any" };
        else
          params.tool_choice = { type: "auto" };
      }
      const s = client.messages.stream(params, {
        signal: options.signal
      });
      let text = "";
      s.on("text", (delta) => {
        text += delta;
        callbacks.onText(delta);
      });
      if (callbacks.onThinking) {
        s.on("thinking", (delta) => {
          callbacks.onThinking(delta);
        });
      }
      const response = await s.finalMessage();
      const toolCalls = response.content.filter((b) => b.type === "tool_use").map((b) => ({ id: b.id, name: b.name, input: b.input }));
      const finishReason = mapStopReason(response.stop_reason);
      const isPause = response.stop_reason === "pause_turn";
      return {
        assistantMessage: fromAnthropic({ role: "assistant", content: response.content }),
        text,
        toolCalls,
        // A pause_turn is never "done": the agent loop must continue the turn.
        done: !isPause && (response.stop_reason === "end_turn" || toolCalls.length === 0),
        usage: {
          input: response.usage.input_tokens,
          output: response.usage.output_tokens,
          cacheCreation: response.usage.cache_creation_input_tokens ?? void 0,
          cacheRead: response.usage.cache_read_input_tokens ?? void 0,
          ...finishReason ? { finishReason } : {},
          modelId: response.model ?? options.model
        }
      };
    }
  };
}
|
|
501
|
-
|
|
502
|
-
// src/providers/cerebras.ts
|
|
503
|
-
// Cerebras OpenAI-compatible endpoint.
var BASE_URL = "https://api.cerebras.ai/v1";
// Resolve the Cerebras API key: a non-empty `apiKey` param wins, then the
// CEREBRAS_API_KEY environment variable; otherwise throw.
function getApiKey(params) {
  const fromParams = params?.apiKey;
  if (typeof fromParams === "string" && fromParams.length > 0) {
    return fromParams;
  }
  const fromEnv = process.env.CEREBRAS_API_KEY;
  if (fromEnv) {
    return fromEnv;
  }
  throw new Error("No Cerebras API key found. Pass `apiKey` or set CEREBRAS_API_KEY in your environment.");
}
|
|
511
|
-
// Provider factory for Cerebras: a thin wrapper over the OpenAI-compatible
// transport with Cerebras endpoint, default model, and capability flags.
function cerebras(params) {
  const resolvedKey = getApiKey(params);
  const config = {
    name: "cerebras",
    apiKey: resolvedKey,
    baseURL: BASE_URL,
    defaultModel: params?.defaultModel || "zai-glm-4.7",
    capabilities: params?.capabilities ?? { vision: false, imageInToolResult: false }
  };
  return openaiCompat(config);
}
|
|
521
|
-
|
|
522
|
-
// src/providers/openai.ts
|
|
523
|
-
import { getModel } from "@mariozechner/pi-ai";
|
|
524
|
-
import { streamOpenAICodexResponses } from "@mariozechner/pi-ai/openai-codex-responses";
|
|
525
|
-
// Registry id used to look up Codex models via pi-ai's getModel().
var PROVIDER_ID = "openai-codex";
// Fallback model id when the requested one is absent from the registry
// (see resolveModel).
var DEFAULT_MODEL = "gpt-5.4";
|
|
527
|
-
// Look up a Codex model by id. Unknown ids fall back to the default model's
// registry entry with the requested id/name substituted in, so pricing and
// limits come from the default while the wire id stays as requested. Throws
// only when even the default model is missing from the registry.
function resolveModel(modelId) {
  const exact = getModel(PROVIDER_ID, modelId);
  if (exact) {
    return exact;
  }
  const fallback = getModel(PROVIDER_ID, DEFAULT_MODEL);
  if (!fallback) {
    throw new Error(`OpenAI Codex model registry is missing the default model: ${DEFAULT_MODEL}`);
  }
  return { ...fallback, id: modelId, name: modelId };
}
|
|
536
|
-
// A fresh all-zero usage record for synthetic pi-ai assistant messages.
function emptyUsage() {
  const zeroCost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 };
  return {
    input: 0,
    output: 0,
    cacheRead: 0,
    cacheWrite: 0,
    totalTokens: 0,
    cost: zeroCost
  };
}
|
|
546
|
-
// Map internal tool specs to the OpenAI Responses tool shape (`parameters`
// rather than Anthropic's `input_schema`).
function formatTools(tools) {
  const formatted = [];
  for (const { name, description, inputSchema } of tools) {
    formatted.push({ name, description, parameters: inputSchema });
  }
  return formatted;
}
|
|
553
|
-
// Translate internal chat messages into pi-ai's message shapes for the Codex
// Responses transport. Tool-result messages fan out into one "toolResult"
// entry per result; user messages collapse to a bare string when they are a
// single text block; assistant messages keep text/thinking/tool-call blocks
// (dropping Anthropic-signed thinking, which the OpenAI backend can't verify).
function toPiMessages(messages, modelId) {
  const out = [];
  for (const msg of messages) {
    const toolResults = msg.content.filter((b) => b.type === "tool_result");
    if (toolResults.length > 0) {
      for (const result of toolResults) {
        // String outputs become a single text block; structured outputs map
        // image blocks to pi-ai's {data, mimeType} shape, everything else to text.
        const content2 = typeof result.output === "string" ? [{ type: "text", text: result.output }] : result.output.map((block) => block.type === "image" ? { type: "image", data: block.data, mimeType: block.mediaType } : { type: "text", text: block.text });
        out.push({
          role: "toolResult",
          toolCallId: result.callId,
          // Tool name is not tracked on results here; pi-ai accepts "".
          toolName: "",
          content: content2,
          isError: result.isError ?? false,
          timestamp: Date.now()
        });
      }
      continue;
    }
    const textBlocks = msg.content.filter((b) => b.type === "text");
    const imageBlocks = msg.content.filter((b) => b.type === "image");
    if (msg.role === "user") {
      // Fast path: plain single-text user message → bare string content.
      if (imageBlocks.length === 0 && textBlocks.length === 1) {
        out.push({ role: "user", content: textBlocks[0].text, timestamp: Date.now() });
        continue;
      }
      // Mixed content: images first, then text blocks.
      out.push({
        role: "user",
        content: [
          ...imageBlocks.map((img) => ({ type: "image", data: img.data, mimeType: img.mediaType })),
          ...textBlocks.map((block) => ({ type: "text", text: block.text }))
        ],
        timestamp: Date.now()
      });
      continue;
    }
    // Assistant message: rebuild block list in pi-ai vocabulary.
    const content = [];
    for (const block of msg.content) {
      if (block.type === "text") {
        content.push({ type: "text", text: block.text });
      } else if (block.type === "thinking") {
        // Anthropic-produced thinking signatures are provider-specific; skip.
        if (block.signatureProducer === "anthropic")
          continue;
        content.push({ type: "thinking", thinking: block.text, thinkingSignature: block.signature });
      } else if (block.type === "tool_call") {
        content.push({ type: "toolCall", id: block.id, name: block.name, arguments: block.input });
      }
    }
    out.push({
      role: "assistant",
      content,
      api: "openai-codex-responses",
      provider: PROVIDER_ID,
      model: modelId,
      // Synthetic history entries carry zeroed usage and a "stop" reason.
      usage: emptyUsage(),
      stopReason: "stop",
      timestamp: Date.now()
    });
  }
  return out;
}
|
|
613
|
-
// Convert a pi-ai assistant message back into the internal block vocabulary.
// Thinking blocks keep their signature (tagged signatureProducer: "openai")
// only when one is present; unknown block types are dropped.
function fromPiAssistantMessage(message) {
  const blocks = [];
  for (const piBlock of message.content) {
    switch (piBlock.type) {
      case "text":
        blocks.push({ type: "text", text: piBlock.text });
        break;
      case "thinking": {
        const thinking = { type: "thinking", text: piBlock.thinking };
        if (typeof piBlock.thinkingSignature === "string") {
          thinking.signature = piBlock.thinkingSignature;
          thinking.signatureProducer = "openai";
        }
        blocks.push(thinking);
        break;
      }
      case "toolCall":
        blocks.push({ type: "tool_call", id: piBlock.id, name: piBlock.name, input: piBlock.arguments });
        break;
    }
  }
  return { role: "assistant", content: blocks };
}
|
|
634
|
-
// Collect tool calls from a pi-ai assistant message as {id, name, input}.
function extractToolCalls(message) {
  const calls = [];
  for (const block of message.content) {
    if (block.type === "toolCall") {
      calls.push({ id: block.id, name: block.name, input: block.arguments });
    }
  }
  return calls;
}
|
|
641
|
-
// Concatenate all text-block contents of a pi-ai assistant message.
function extractText(message) {
  let text = "";
  for (const block of message.content) {
    if (block.type === "text") {
      text += block.text;
    }
  }
  return text;
}
|
|
644
|
-
// Convert pi-ai usage into the internal per-turn usage shape. Zero cache
// counts and zero cost deliberately collapse to undefined via `|| void 0`;
// finishReason is included only when truthy.
function toTurnUsage(usage, finishReason, modelId) {
  const turn = {
    input: usage.input,
    output: usage.output,
    cacheRead: usage.cacheRead || void 0,
    cacheCreation: usage.cacheWrite || void 0,
    cost: usage.cost.total || void 0,
    modelId
  };
  if (finishReason) {
    turn.finishReason = finishReason;
  }
  return turn;
}
|
|
655
|
-
// Classify an error from the OpenAI Codex transport into the internal
// taxonomy: aborted, context_exceeded (by code or message heuristics),
// provider_error (any non-empty message), or null for anything else.
function classifyOpenAIError(err) {
  if (!err || typeof err !== "object") {
    return null;
  }
  const anyErr = err;
  if (anyErr.name === "AbortError") {
    return { kind: "aborted" };
  }
  const message = anyErr.message ?? "";
  const code = anyErr.code ?? anyErr.type;
  const contextBlown = code === "context_length_exceeded" || matchesContextExceeded(message);
  if (contextBlown) {
    return {
      kind: "context_exceeded",
      providerCode: code ?? "context_length_exceeded",
      message
    };
  }
  if (message.length === 0) {
    return null;
  }
  return { kind: "provider_error", providerCode: code, message };
}
|
|
679
|
-
// Apply tool-choice options onto a raw Responses API payload (mutates and
// returns the payload). Absent toolChoice leaves the payload untouched.
function applyPayloadOverrides(payload, options) {
  const body = payload;
  const choice = options.toolChoice;
  if (!choice) {
    return body;
  }
  if (choice.type === "tool" && choice.name) {
    body.tool_choice = { type: "function", name: choice.name };
  } else if (choice.type === "required") {
    body.tool_choice = "required";
  } else {
    body.tool_choice = "auto";
  }
  return body;
}
|
|
691
|
-
/**
 * Factory for the OpenAI (Codex OAuth) provider.
 *
 * Returns a provider object exposing metadata, message formatters, an error
 * classifier, and an async `stream(options, callbacks)` entry point. OAuth
 * credentials passed via `params` are kept in a mutable closure so that a
 * token refresh during a stream is reused by subsequent calls.
 *
 * NOTE(review): the exact contracts of resolveModel / resolveOAuthApiKey /
 * streamOpenAICodexResponses are defined elsewhere in this bundle — the
 * comments below describe only what this function visibly does with them.
 */
function openai(params) {
  const defaultModel = params?.defaultModel || DEFAULT_MODEL;
  // Seed runtime credentials only when a complete access/refresh/expires
  // triple was supplied; accountId is carried along when present.
  let runtimeCredentials = typeof params?.access === "string" && typeof params.refresh === "string" && typeof params.expires === "number" ? {
    access: params.access,
    refresh: params.refresh,
    expires: params.expires,
    ...params.accountId ? { accountId: params.accountId } : {}
  } : void 0;
  return {
    name: "openai",
    meta: {
      defaultModel,
      isOAuth: true,
      capabilities: {
        vision: true,
        imageInToolResult: true
      }
    },
    formatTools,
    userMessage,
    assistantMessage,
    toolResultsMessage,
    classifyError: classifyOpenAIError,
    /**
     * Runs a single streaming turn: resolves an OAuth API key (refreshing if
     * needed), streams the model response, forwards text/thinking deltas to
     * the callbacks, and returns the accumulated turn result.
     */
    async stream(options, callbacks) {
      const modelId = options.model || defaultModel;
      const model = resolveModel(modelId);
      const apiKey = await resolveOAuthApiKey(
        {
          provider: "openai",
          providerId: PROVIDER_ID,
          // Prefer refreshed runtime credentials over the originals.
          params: runtimeCredentials ? { ...params, ...runtimeCredentials } : params,
          envKey: "OPENAI_CODEX_API_KEY",
          extraCredentialKeys: ["accountId"],
          missingError: "No OpenAI Codex OAuth token found. Run `bun run auth --openai` first.",
          refreshError: (reason) => `OpenAI Codex OAuth token refresh failed. Run \`bun run auth --openai\` again. ${reason}`
        },
        {
          ...callbacks,
          // Persist rotated credentials into the closure (only when they came
          // from params, not the environment), then forward to the caller.
          async onOAuthRefresh(ctx) {
            if (ctx.source === "params") {
              runtimeCredentials = {
                access: ctx.credentials.access,
                refresh: ctx.credentials.refresh,
                expires: ctx.credentials.expires,
                ...typeof ctx.credentials.accountId === "string" ? { accountId: ctx.credentials.accountId } : {}
              };
            }
            await callbacks.onOAuthRefresh?.(ctx);
          }
        }
      );
      const context = {
        systemPrompt: options.system,
        messages: toPiMessages(options.messages, modelId),
        tools: options.tools
      };
      // "off" and "adaptive" are not forwarded as reasoning effort levels.
      const reasoningLevel = options.thinking && options.thinking !== "off" && options.thinking !== "adaptive" ? options.thinking : void 0;
      const stream = streamOpenAICodexResponses(model, context, {
        apiKey,
        maxTokens: options.maxTokens,
        signal: options.signal,
        transport: params?.transport,
        reasoningEffort: reasoningLevel,
        reasoningSummary: reasoningLevel ? "auto" : void 0,
        onPayload: (payload) => applyPayloadOverrides(payload, options)
      });
      let finalMessage;
      let text = "";
      let thinking = "";
      for await (const event of stream) {
        if (event.type === "text_delta") {
          text += event.delta;
          callbacks.onText(event.delta);
        } else if (event.type === "thinking_delta") {
          thinking += event.delta;
          callbacks.onThinking?.(event.delta);
        } else if (event.type === "thinking_end") {
          // Emit only the portion of the final thinking content not already
          // streamed as deltas; if deltas diverged from the final content,
          // emit nothing extra (or the whole content when no deltas arrived).
          const delta = event.content.startsWith(thinking) ? event.content.slice(thinking.length) : thinking ? "" : event.content;
          if (delta) {
            thinking += delta;
            callbacks.onThinking?.(delta);
          }
        } else if (event.type === "done") {
          finalMessage = event.message;
        } else if (event.type === "error") {
          throw new Error(event.error.errorMessage || "OpenAI Codex API error");
        }
      }
      // Fall back to the stream's result if no "done" event carried a message.
      finalMessage ??= await stream.result();
      text ||= extractText(finalMessage);
      const toolCalls = extractToolCalls(finalMessage);
      const assistantTurn = fromPiAssistantMessage(finalMessage);
      const finishReason = toolCalls.length > 0 ? "tool-calls" : "stop";
      return {
        assistantMessage: assistantTurn,
        text,
        toolCalls,
        // The turn is complete only when the model requested no tools.
        done: toolCalls.length === 0,
        usage: toTurnUsage(finalMessage.usage, finishReason, modelId)
      };
    }
  };
}
|
|
794
|
-
|
|
795
|
-
// src/providers/openrouter.ts
// Base endpoint for OpenRouter's OpenAI-compatible REST API.
var BASE_URL2 = "https://openrouter.ai/api/v1";
|
|
797
|
-
/**
 * Resolves the OpenRouter API key: an explicit non-empty `apiKey` param wins,
 * then the OPENROUTER_API_KEY environment variable; otherwise throws.
 *
 * @param {{apiKey?: string} | undefined} params
 * @returns {string} the resolved API key
 * @throws {Error} when no key is available from either source
 */
function getApiKey2(params) {
  const explicit = params?.apiKey;
  if (typeof explicit === "string" && explicit.length > 0) {
    return explicit;
  }
  const fromEnv = process.env.OPENROUTER_API_KEY;
  if (fromEnv) {
    return fromEnv;
  }
  throw new Error("No OpenRouter API key found. Pass `apiKey` or set OPENROUTER_API_KEY in your environment.");
}
|
|
804
|
-
/**
 * Factory for the OpenRouter provider. Resolves the API key up front (so a
 * missing key fails fast) and delegates everything else to the shared
 * OpenAI-compatible adapter with OpenRouter-specific settings.
 *
 * @param {{apiKey?: string, defaultModel?: string, capabilities?: object} | undefined} params
 * @returns {object} a provider built by openaiCompat
 */
function openrouter(params) {
  const apiKey = getApiKey2(params);
  const config = {
    name: "openrouter",
    apiKey,
    baseURL: BASE_URL2,
    defaultModel: params?.defaultModel || "anthropic/claude-sonnet-4-6",
    // Attribution headers recommended by OpenRouter for request ranking.
    extraHeaders: {
      "HTTP-Referer": "https://github.com/Tahul/zidane",
      "X-Title": "zidane"
    },
    capabilities: params?.capabilities ?? { vision: true, imageInToolResult: false },
    // `cache_control` markers are honored on Anthropic/Gemini routes and
    // ignored elsewhere, so enabling breakpoints by default is safe; callers
    // can still disable via `behavior.cache = false` without rebuilding the
    // provider.
    cacheBreakpoints: true,
    // OpenRouter accepts the normalized `reasoning` request field and echoes
    // `reasoning_details` back on assistant messages, letting extended
    // reasoning state survive across turns on the same upstream route.
    supportsReasoning: true
  };
  return openaiCompat(config);
}
|
|
828
|
-
|
|
829
|
-
// Public provider factories exported from this chunk.
export {
  anthropic,
  cerebras,
  openai,
  openrouter
};
|