claudish 2.4.0 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -4
- package/dist/index.js +1546 -1245
- package/package.json +21 -20
- package/scripts/extract-models.ts +10 -0
- package/scripts/postinstall.cjs +0 -0
- package/skills/claudish-usage/SKILL.md +43 -24
package/dist/index.js CHANGED

@@ -1,8 +1,22 @@
 #!/usr/bin/env node
+import { createRequire } from "node:module";
+var __create = Object.create;
+var __getProtoOf = Object.getPrototypeOf;
 var __defProp = Object.defineProperty;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __toESM = (mod, isNodeMode, target) => {
+  target = mod != null ? __create(__getProtoOf(mod)) : {};
+  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
+  for (let key of __getOwnPropNames(mod))
+    if (!__hasOwnProp.call(to, key))
+      __defProp(to, key, {
+        get: () => mod[key],
+        enumerable: true
+      });
+  return to;
+};
 var __moduleCache = /* @__PURE__ */ new WeakMap;
 var __toCommonJS = (from) => {
   var entry = __moduleCache.get(from), desc;
@@ -17,6 +31,7 @@ var __toCommonJS = (from) => {
   __moduleCache.set(from, entry);
   return entry;
 };
+var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
 var __export = (target, all) => {
   for (var name in all)
     __defProp(target, name, {
@@ -27,6 +42,402 @@ var __export = (target, all) => {
|
|
|
27
42
|
});
|
|
28
43
|
};
|
|
29
44
|
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
|
|
45
|
+
var __require = /* @__PURE__ */ createRequire(import.meta.url);
|
|
46
|
+
|
|
47
|
+
// node_modules/dotenv/package.json
|
|
48
|
+
var require_package = __commonJS((exports, module) => {
|
|
49
|
+
module.exports = {
|
|
50
|
+
name: "dotenv",
|
|
51
|
+
version: "17.2.3",
|
|
52
|
+
description: "Loads environment variables from .env file",
|
|
53
|
+
main: "lib/main.js",
|
|
54
|
+
types: "lib/main.d.ts",
|
|
55
|
+
exports: {
|
|
56
|
+
".": {
|
|
57
|
+
types: "./lib/main.d.ts",
|
|
58
|
+
require: "./lib/main.js",
|
|
59
|
+
default: "./lib/main.js"
|
|
60
|
+
},
|
|
61
|
+
"./config": "./config.js",
|
|
62
|
+
"./config.js": "./config.js",
|
|
63
|
+
"./lib/env-options": "./lib/env-options.js",
|
|
64
|
+
"./lib/env-options.js": "./lib/env-options.js",
|
|
65
|
+
"./lib/cli-options": "./lib/cli-options.js",
|
|
66
|
+
"./lib/cli-options.js": "./lib/cli-options.js",
|
|
67
|
+
"./package.json": "./package.json"
|
|
68
|
+
},
|
|
69
|
+
scripts: {
|
|
70
|
+
"dts-check": "tsc --project tests/types/tsconfig.json",
|
|
71
|
+
lint: "standard",
|
|
72
|
+
pretest: "npm run lint && npm run dts-check",
|
|
73
|
+
test: "tap run tests/**/*.js --allow-empty-coverage --disable-coverage --timeout=60000",
|
|
74
|
+
"test:coverage": "tap run tests/**/*.js --show-full-coverage --timeout=60000 --coverage-report=text --coverage-report=lcov",
|
|
75
|
+
prerelease: "npm test",
|
|
76
|
+
release: "standard-version"
|
|
77
|
+
},
|
|
78
|
+
repository: {
|
|
79
|
+
type: "git",
|
|
80
|
+
url: "git://github.com/motdotla/dotenv.git"
|
|
81
|
+
},
|
|
82
|
+
homepage: "https://github.com/motdotla/dotenv#readme",
|
|
83
|
+
funding: "https://dotenvx.com",
|
|
84
|
+
keywords: [
|
|
85
|
+
"dotenv",
|
|
86
|
+
"env",
|
|
87
|
+
".env",
|
|
88
|
+
"environment",
|
|
89
|
+
"variables",
|
|
90
|
+
"config",
|
|
91
|
+
"settings"
|
|
92
|
+
],
|
|
93
|
+
readmeFilename: "README.md",
|
|
94
|
+
license: "BSD-2-Clause",
|
|
95
|
+
devDependencies: {
|
|
96
|
+
"@types/node": "^18.11.3",
|
|
97
|
+
decache: "^4.6.2",
|
|
98
|
+
sinon: "^14.0.1",
|
|
99
|
+
standard: "^17.0.0",
|
|
100
|
+
"standard-version": "^9.5.0",
|
|
101
|
+
tap: "^19.2.0",
|
|
102
|
+
typescript: "^4.8.4"
|
|
103
|
+
},
|
|
104
|
+
engines: {
|
|
105
|
+
node: ">=12"
|
|
106
|
+
},
|
|
107
|
+
browser: {
|
|
108
|
+
fs: false
|
|
109
|
+
}
|
|
110
|
+
};
|
|
111
|
+
});
|
|
112
|
+
|
|
113
|
+
// node_modules/dotenv/lib/main.js
|
|
114
|
+
var require_main = __commonJS((exports, module) => {
|
|
115
|
+
var fs = __require("fs");
|
|
116
|
+
var path = __require("path");
|
|
117
|
+
var os = __require("os");
|
|
118
|
+
var crypto = __require("crypto");
|
|
119
|
+
var packageJson = require_package();
|
|
120
|
+
var version = packageJson.version;
|
|
121
|
+
var TIPS = [
|
|
122
|
+
"\uD83D\uDD10 encrypt with Dotenvx: https://dotenvx.com",
|
|
123
|
+
"\uD83D\uDD10 prevent committing .env to code: https://dotenvx.com/precommit",
|
|
124
|
+
"\uD83D\uDD10 prevent building .env in docker: https://dotenvx.com/prebuild",
|
|
125
|
+
"\uD83D\uDCE1 add observability to secrets: https://dotenvx.com/ops",
|
|
126
|
+
"\uD83D\uDC65 sync secrets across teammates & machines: https://dotenvx.com/ops",
|
|
127
|
+
"\uD83D\uDDC2️ backup and recover secrets: https://dotenvx.com/ops",
|
|
128
|
+
"✅ audit secrets and track compliance: https://dotenvx.com/ops",
|
|
129
|
+
"\uD83D\uDD04 add secrets lifecycle management: https://dotenvx.com/ops",
|
|
130
|
+
"\uD83D\uDD11 add access controls to secrets: https://dotenvx.com/ops",
|
|
131
|
+
"\uD83D\uDEE0️ run anywhere with `dotenvx run -- yourcommand`",
|
|
132
|
+
"⚙️ specify custom .env file path with { path: '/custom/path/.env' }",
|
|
133
|
+
"⚙️ enable debug logging with { debug: true }",
|
|
134
|
+
"⚙️ override existing env vars with { override: true }",
|
|
135
|
+
"⚙️ suppress all logs with { quiet: true }",
|
|
136
|
+
"⚙️ write to custom object with { processEnv: myObject }",
|
|
137
|
+
"⚙️ load multiple .env files with { path: ['.env.local', '.env'] }"
|
|
138
|
+
];
|
|
139
|
+
function _getRandomTip() {
|
|
140
|
+
return TIPS[Math.floor(Math.random() * TIPS.length)];
|
|
141
|
+
}
|
|
142
|
+
function parseBoolean(value) {
|
|
143
|
+
if (typeof value === "string") {
|
|
144
|
+
return !["false", "0", "no", "off", ""].includes(value.toLowerCase());
|
|
145
|
+
}
|
|
146
|
+
return Boolean(value);
|
|
147
|
+
}
|
|
148
|
+
function supportsAnsi() {
|
|
149
|
+
return process.stdout.isTTY;
|
|
150
|
+
}
|
|
151
|
+
function dim(text) {
|
|
152
|
+
return supportsAnsi() ? `\x1B[2m${text}\x1B[0m` : text;
|
|
153
|
+
}
|
|
154
|
+
var LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
|
|
155
|
+
function parse(src) {
|
|
156
|
+
const obj = {};
|
|
157
|
+
let lines = src.toString();
|
|
158
|
+
lines = lines.replace(/\r\n?/mg, `
|
|
159
|
+
`);
|
|
160
|
+
let match;
|
|
161
|
+
while ((match = LINE.exec(lines)) != null) {
|
|
162
|
+
const key = match[1];
|
|
163
|
+
let value = match[2] || "";
|
|
164
|
+
value = value.trim();
|
|
165
|
+
const maybeQuote = value[0];
|
|
166
|
+
value = value.replace(/^(['"`])([\s\S]*)\1$/mg, "$2");
|
|
167
|
+
if (maybeQuote === '"') {
|
|
168
|
+
value = value.replace(/\\n/g, `
|
|
169
|
+
`);
|
|
170
|
+
value = value.replace(/\\r/g, "\r");
|
|
171
|
+
}
|
|
172
|
+
obj[key] = value;
|
|
173
|
+
}
|
|
174
|
+
return obj;
|
|
175
|
+
}
|
|
176
|
+
function _parseVault(options) {
|
|
177
|
+
options = options || {};
|
|
178
|
+
const vaultPath = _vaultPath(options);
|
|
179
|
+
options.path = vaultPath;
|
|
180
|
+
const result = DotenvModule.configDotenv(options);
|
|
181
|
+
if (!result.parsed) {
|
|
182
|
+
const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`);
|
|
183
|
+
err.code = "MISSING_DATA";
|
|
184
|
+
throw err;
|
|
185
|
+
}
|
|
186
|
+
const keys = _dotenvKey(options).split(",");
|
|
187
|
+
const length = keys.length;
|
|
188
|
+
let decrypted;
|
|
189
|
+
for (let i = 0;i < length; i++) {
|
|
190
|
+
try {
|
|
191
|
+
const key = keys[i].trim();
|
|
192
|
+
const attrs = _instructions(result, key);
|
|
193
|
+
decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
|
|
194
|
+
break;
|
|
195
|
+
} catch (error) {
|
|
196
|
+
if (i + 1 >= length) {
|
|
197
|
+
throw error;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
return DotenvModule.parse(decrypted);
|
|
202
|
+
}
|
|
203
|
+
function _warn(message) {
|
|
204
|
+
console.error(`[dotenv@${version}][WARN] ${message}`);
|
|
205
|
+
}
|
|
206
|
+
function _debug(message) {
|
|
207
|
+
console.log(`[dotenv@${version}][DEBUG] ${message}`);
|
|
208
|
+
}
|
|
209
|
+
function _log(message) {
|
|
210
|
+
console.log(`[dotenv@${version}] ${message}`);
|
|
211
|
+
}
|
|
212
|
+
function _dotenvKey(options) {
|
|
213
|
+
if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {
|
|
214
|
+
return options.DOTENV_KEY;
|
|
215
|
+
}
|
|
216
|
+
if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {
|
|
217
|
+
return process.env.DOTENV_KEY;
|
|
218
|
+
}
|
|
219
|
+
return "";
|
|
220
|
+
}
|
|
221
|
+
function _instructions(result, dotenvKey) {
|
|
222
|
+
let uri;
|
|
223
|
+
try {
|
|
224
|
+
uri = new URL(dotenvKey);
|
|
225
|
+
} catch (error) {
|
|
226
|
+
if (error.code === "ERR_INVALID_URL") {
|
|
227
|
+
const err = new Error("INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development");
|
|
228
|
+
err.code = "INVALID_DOTENV_KEY";
|
|
229
|
+
throw err;
|
|
230
|
+
}
|
|
231
|
+
throw error;
|
|
232
|
+
}
|
|
233
|
+
const key = uri.password;
|
|
234
|
+
if (!key) {
|
|
235
|
+
const err = new Error("INVALID_DOTENV_KEY: Missing key part");
|
|
236
|
+
err.code = "INVALID_DOTENV_KEY";
|
|
237
|
+
throw err;
|
|
238
|
+
}
|
|
239
|
+
const environment = uri.searchParams.get("environment");
|
|
240
|
+
if (!environment) {
|
|
241
|
+
const err = new Error("INVALID_DOTENV_KEY: Missing environment part");
|
|
242
|
+
err.code = "INVALID_DOTENV_KEY";
|
|
243
|
+
throw err;
|
|
244
|
+
}
|
|
245
|
+
const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`;
|
|
246
|
+
const ciphertext = result.parsed[environmentKey];
|
|
247
|
+
if (!ciphertext) {
|
|
248
|
+
const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`);
|
|
249
|
+
err.code = "NOT_FOUND_DOTENV_ENVIRONMENT";
|
|
250
|
+
throw err;
|
|
251
|
+
}
|
|
252
|
+
return { ciphertext, key };
|
|
253
|
+
}
|
|
254
|
+
function _vaultPath(options) {
|
|
255
|
+
let possibleVaultPath = null;
|
|
256
|
+
if (options && options.path && options.path.length > 0) {
|
|
257
|
+
if (Array.isArray(options.path)) {
|
|
258
|
+
for (const filepath of options.path) {
|
|
259
|
+
if (fs.existsSync(filepath)) {
|
|
260
|
+
possibleVaultPath = filepath.endsWith(".vault") ? filepath : `${filepath}.vault`;
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
} else {
|
|
264
|
+
possibleVaultPath = options.path.endsWith(".vault") ? options.path : `${options.path}.vault`;
|
|
265
|
+
}
|
|
266
|
+
} else {
|
|
267
|
+
possibleVaultPath = path.resolve(process.cwd(), ".env.vault");
|
|
268
|
+
}
|
|
269
|
+
if (fs.existsSync(possibleVaultPath)) {
|
|
270
|
+
return possibleVaultPath;
|
|
271
|
+
}
|
|
272
|
+
return null;
|
|
273
|
+
}
|
|
274
|
+
function _resolveHome(envPath) {
|
|
275
|
+
return envPath[0] === "~" ? path.join(os.homedir(), envPath.slice(1)) : envPath;
|
|
276
|
+
}
|
|
277
|
+
function _configVault(options) {
|
|
278
|
+
const debug = parseBoolean(process.env.DOTENV_CONFIG_DEBUG || options && options.debug);
|
|
279
|
+
const quiet = parseBoolean(process.env.DOTENV_CONFIG_QUIET || options && options.quiet);
|
|
280
|
+
if (debug || !quiet) {
|
|
281
|
+
_log("Loading env from encrypted .env.vault");
|
|
282
|
+
}
|
|
283
|
+
const parsed = DotenvModule._parseVault(options);
|
|
284
|
+
let processEnv = process.env;
|
|
285
|
+
if (options && options.processEnv != null) {
|
|
286
|
+
processEnv = options.processEnv;
|
|
287
|
+
}
|
|
288
|
+
DotenvModule.populate(processEnv, parsed, options);
|
|
289
|
+
return { parsed };
|
|
290
|
+
}
|
|
291
|
+
function configDotenv(options) {
|
|
292
|
+
const dotenvPath = path.resolve(process.cwd(), ".env");
|
|
293
|
+
let encoding = "utf8";
|
|
294
|
+
let processEnv = process.env;
|
|
295
|
+
if (options && options.processEnv != null) {
|
|
296
|
+
processEnv = options.processEnv;
|
|
297
|
+
}
|
|
298
|
+
let debug = parseBoolean(processEnv.DOTENV_CONFIG_DEBUG || options && options.debug);
|
|
299
|
+
let quiet = parseBoolean(processEnv.DOTENV_CONFIG_QUIET || options && options.quiet);
|
|
300
|
+
if (options && options.encoding) {
|
|
301
|
+
encoding = options.encoding;
|
|
302
|
+
} else {
|
|
303
|
+
if (debug) {
|
|
304
|
+
_debug("No encoding is specified. UTF-8 is used by default");
|
|
305
|
+
}
|
|
306
|
+
}
|
|
307
|
+
let optionPaths = [dotenvPath];
|
|
308
|
+
if (options && options.path) {
|
|
309
|
+
if (!Array.isArray(options.path)) {
|
|
310
|
+
optionPaths = [_resolveHome(options.path)];
|
|
311
|
+
} else {
|
|
312
|
+
optionPaths = [];
|
|
313
|
+
for (const filepath of options.path) {
|
|
314
|
+
optionPaths.push(_resolveHome(filepath));
|
|
315
|
+
}
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
let lastError;
|
|
319
|
+
const parsedAll = {};
|
|
320
|
+
for (const path2 of optionPaths) {
|
|
321
|
+
try {
|
|
322
|
+
const parsed = DotenvModule.parse(fs.readFileSync(path2, { encoding }));
|
|
323
|
+
DotenvModule.populate(parsedAll, parsed, options);
|
|
324
|
+
} catch (e) {
|
|
325
|
+
if (debug) {
|
|
326
|
+
_debug(`Failed to load ${path2} ${e.message}`);
|
|
327
|
+
}
|
|
328
|
+
lastError = e;
|
|
329
|
+
}
|
|
330
|
+
}
|
|
331
|
+
const populated = DotenvModule.populate(processEnv, parsedAll, options);
|
|
332
|
+
debug = parseBoolean(processEnv.DOTENV_CONFIG_DEBUG || debug);
|
|
333
|
+
quiet = parseBoolean(processEnv.DOTENV_CONFIG_QUIET || quiet);
|
|
334
|
+
if (debug || !quiet) {
|
|
335
|
+
const keysCount = Object.keys(populated).length;
|
|
336
|
+
const shortPaths = [];
|
|
337
|
+
for (const filePath of optionPaths) {
|
|
338
|
+
try {
|
|
339
|
+
const relative = path.relative(process.cwd(), filePath);
|
|
340
|
+
shortPaths.push(relative);
|
|
341
|
+
} catch (e) {
|
|
342
|
+
if (debug) {
|
|
343
|
+
_debug(`Failed to load ${filePath} ${e.message}`);
|
|
344
|
+
}
|
|
345
|
+
lastError = e;
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
_log(`injecting env (${keysCount}) from ${shortPaths.join(",")} ${dim(`-- tip: ${_getRandomTip()}`)}`);
|
|
349
|
+
}
|
|
350
|
+
if (lastError) {
|
|
351
|
+
return { parsed: parsedAll, error: lastError };
|
|
352
|
+
} else {
|
|
353
|
+
return { parsed: parsedAll };
|
|
354
|
+
}
|
|
355
|
+
}
|
|
356
|
+
function config(options) {
|
|
357
|
+
if (_dotenvKey(options).length === 0) {
|
|
358
|
+
return DotenvModule.configDotenv(options);
|
|
359
|
+
}
|
|
360
|
+
const vaultPath = _vaultPath(options);
|
|
361
|
+
if (!vaultPath) {
|
|
362
|
+
_warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`);
|
|
363
|
+
return DotenvModule.configDotenv(options);
|
|
364
|
+
}
|
|
365
|
+
return DotenvModule._configVault(options);
|
|
366
|
+
}
|
|
367
|
+
function decrypt(encrypted, keyStr) {
|
|
368
|
+
const key = Buffer.from(keyStr.slice(-64), "hex");
|
|
369
|
+
let ciphertext = Buffer.from(encrypted, "base64");
|
|
370
|
+
const nonce = ciphertext.subarray(0, 12);
|
|
371
|
+
const authTag = ciphertext.subarray(-16);
|
|
372
|
+
ciphertext = ciphertext.subarray(12, -16);
|
|
373
|
+
try {
|
|
374
|
+
const aesgcm = crypto.createDecipheriv("aes-256-gcm", key, nonce);
|
|
375
|
+
aesgcm.setAuthTag(authTag);
|
|
376
|
+
return `${aesgcm.update(ciphertext)}${aesgcm.final()}`;
|
|
377
|
+
} catch (error) {
|
|
378
|
+
const isRange = error instanceof RangeError;
|
|
379
|
+
const invalidKeyLength = error.message === "Invalid key length";
|
|
380
|
+
const decryptionFailed = error.message === "Unsupported state or unable to authenticate data";
|
|
381
|
+
if (isRange || invalidKeyLength) {
|
|
382
|
+
const err = new Error("INVALID_DOTENV_KEY: It must be 64 characters long (or more)");
|
|
383
|
+
err.code = "INVALID_DOTENV_KEY";
|
|
384
|
+
throw err;
|
|
385
|
+
} else if (decryptionFailed) {
|
|
386
|
+
const err = new Error("DECRYPTION_FAILED: Please check your DOTENV_KEY");
|
|
387
|
+
err.code = "DECRYPTION_FAILED";
|
|
388
|
+
throw err;
|
|
389
|
+
} else {
|
|
390
|
+
throw error;
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
}
|
|
394
|
+
function populate(processEnv, parsed, options = {}) {
|
|
395
|
+
const debug = Boolean(options && options.debug);
|
|
396
|
+
const override = Boolean(options && options.override);
|
|
397
|
+
const populated = {};
|
|
398
|
+
if (typeof parsed !== "object") {
|
|
399
|
+
const err = new Error("OBJECT_REQUIRED: Please check the processEnv argument being passed to populate");
|
|
400
|
+
err.code = "OBJECT_REQUIRED";
|
|
401
|
+
throw err;
|
|
402
|
+
}
|
|
403
|
+
for (const key of Object.keys(parsed)) {
|
|
404
|
+
if (Object.prototype.hasOwnProperty.call(processEnv, key)) {
|
|
405
|
+
if (override === true) {
|
|
406
|
+
processEnv[key] = parsed[key];
|
|
407
|
+
populated[key] = parsed[key];
|
|
408
|
+
}
|
|
409
|
+
if (debug) {
|
|
410
|
+
if (override === true) {
|
|
411
|
+
_debug(`"${key}" is already defined and WAS overwritten`);
|
|
412
|
+
} else {
|
|
413
|
+
_debug(`"${key}" is already defined and was NOT overwritten`);
|
|
414
|
+
}
|
|
415
|
+
}
|
|
416
|
+
} else {
|
|
417
|
+
processEnv[key] = parsed[key];
|
|
418
|
+
populated[key] = parsed[key];
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
return populated;
|
|
422
|
+
}
|
|
423
|
+
var DotenvModule = {
|
|
424
|
+
configDotenv,
|
|
425
|
+
_configVault,
|
|
426
|
+
_parseVault,
|
|
427
|
+
config,
|
|
428
|
+
decrypt,
|
|
429
|
+
parse,
|
|
430
|
+
populate
|
|
431
|
+
};
|
|
432
|
+
exports.configDotenv = DotenvModule.configDotenv;
|
|
433
|
+
exports._configVault = DotenvModule._configVault;
|
|
434
|
+
exports._parseVault = DotenvModule._parseVault;
|
|
435
|
+
exports.config = DotenvModule.config;
|
|
436
|
+
exports.decrypt = DotenvModule.decrypt;
|
|
437
|
+
exports.parse = DotenvModule.parse;
|
|
438
|
+
exports.populate = DotenvModule.populate;
|
|
439
|
+
module.exports = DotenvModule;
|
|
440
|
+
});
|
|
30
441
|
|
|
31
442
|
// src/config.ts
|
|
32
443
|
var exports_config = {};
|
|
@@ -97,7 +508,15 @@ var init_config = __esm(() => {
     CLAUDISH_PORT: "CLAUDISH_PORT",
     CLAUDISH_ACTIVE_MODEL_NAME: "CLAUDISH_ACTIVE_MODEL_NAME",
     ANTHROPIC_MODEL: "ANTHROPIC_MODEL",
-    ANTHROPIC_SMALL_FAST_MODEL: "ANTHROPIC_SMALL_FAST_MODEL"
+    ANTHROPIC_SMALL_FAST_MODEL: "ANTHROPIC_SMALL_FAST_MODEL",
+    CLAUDISH_MODEL_OPUS: "CLAUDISH_MODEL_OPUS",
+    CLAUDISH_MODEL_SONNET: "CLAUDISH_MODEL_SONNET",
+    CLAUDISH_MODEL_HAIKU: "CLAUDISH_MODEL_HAIKU",
+    CLAUDISH_MODEL_SUBAGENT: "CLAUDISH_MODEL_SUBAGENT",
+    ANTHROPIC_DEFAULT_OPUS_MODEL: "ANTHROPIC_DEFAULT_OPUS_MODEL",
+    ANTHROPIC_DEFAULT_SONNET_MODEL: "ANTHROPIC_DEFAULT_SONNET_MODEL",
+    ANTHROPIC_DEFAULT_HAIKU_MODEL: "ANTHROPIC_DEFAULT_HAIKU_MODEL",
+    CLAUDE_CODE_SUBAGENT_MODEL: "CLAUDE_CODE_SUBAGENT_MODEL"
   };
   OPENROUTER_HEADERS = {
     "HTTP-Referer": "https://github.com/MadAppGang/claude-code",
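The new per-role keys are consumed in parseArgs later in this diff: for each role the CLI flag wins, then the CLAUDISH_* variable, then the matching Claude Code standard variable. A minimal sketch of that resolution order (the helper name and signature are illustrative, not part of the package):

// Sketch only: mirrors the order used in parseArgs below
// (CLI flag, then CLAUDISH_*, then the ANTHROPIC_DEFAULT_* / CLAUDE_CODE_* fallback).
function resolveRoleModel(
  cliValue: string | undefined,   // e.g. the value passed to --model-opus
  claudishVar: string,            // e.g. "CLAUDISH_MODEL_OPUS"
  fallbackVar: string             // e.g. "ANTHROPIC_DEFAULT_OPUS_MODEL"
): string | undefined {
  return cliValue || process.env[claudishVar] || process.env[fallbackVar];
}

// Usage sketch: resolveRoleModel(flags.modelOpus, "CLAUDISH_MODEL_OPUS", "ANTHROPIC_DEFAULT_OPUS_MODEL")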
@@ -124,6 +543,9 @@ var init_types = __esm(() => {
   ];
 });

+// src/index.ts
+var import_dotenv = __toESM(require_main(), 1);
+
 // src/claude-runner.ts
 init_config();
 import { spawn } from "node:child_process";
@@ -369,6 +791,18 @@ async function fetchModelContextWindow(modelId) {
   }
   return 200000;
 }
+async function doesModelSupportReasoning(modelId) {
+  if (!_cachedOpenRouterModels) {
+    await fetchModelContextWindow(modelId);
+  }
+  if (_cachedOpenRouterModels) {
+    const model = _cachedOpenRouterModels.find((m) => m.id === modelId);
+    if (model && model.supported_parameters) {
+      return model.supported_parameters.includes("include_reasoning") || model.supported_parameters.includes("reasoning") || model.id.includes("o1") || model.id.includes("o3") || model.id.includes("r1");
+    }
+  }
+  return false;
+}

 // src/cli.ts
 import { readFileSync as readFileSync2, writeFileSync as writeFileSync3, existsSync as existsSync2, mkdirSync, copyFileSync } from "node:fs";
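For reference, these are the OpenRouter catalog fields that the check above (and the model-listing code further down) reads from each /api/v1/models entry. This is a partial shape inferred from the bundle, not the full API schema:

// Partial sketch of a catalog entry, limited to the fields used in this bundle.
interface OpenRouterModel {
  id: string;                                          // e.g. "x-ai/grok-code-fast-1"
  name?: string;
  description?: string;
  context_length?: number;
  top_provider?: { context_length?: number };
  pricing?: { prompt?: string; completion?: string };  // USD per token, as strings
  supported_parameters?: string[];                     // e.g. "tools", "reasoning", "include_reasoning"
  architecture?: { input_modalities?: string[] };      // e.g. ["text", "image"]
}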
@@ -427,6 +861,7 @@ async function parseArgs(args) {
     jsonOutput: false,
     monitor: false,
     stdin: false,
+    freeOnly: false,
     claudeArgs: []
   };
   const claudishModel = process.env[ENV.CLAUDISH_MODEL];
@@ -436,6 +871,10 @@ async function parseArgs(args) {
   } else if (anthropicModel) {
     config.model = anthropicModel;
   }
+  config.modelOpus = process.env[ENV.CLAUDISH_MODEL_OPUS] || process.env[ENV.ANTHROPIC_DEFAULT_OPUS_MODEL];
+  config.modelSonnet = process.env[ENV.CLAUDISH_MODEL_SONNET] || process.env[ENV.ANTHROPIC_DEFAULT_SONNET_MODEL];
+  config.modelHaiku = process.env[ENV.CLAUDISH_MODEL_HAIKU] || process.env[ENV.ANTHROPIC_DEFAULT_HAIKU_MODEL];
+  config.modelSubagent = process.env[ENV.CLAUDISH_MODEL_SUBAGENT] || process.env[ENV.CLAUDE_CODE_SUBAGENT_MODEL];
   const envPort = process.env[ENV.CLAUDISH_PORT];
   if (envPort) {
     const port = Number.parseInt(envPort, 10);
@@ -454,6 +893,22 @@ async function parseArgs(args) {
       process.exit(1);
     }
     config.model = modelArg;
+  } else if (arg === "--model-opus") {
+    const val = args[++i];
+    if (val)
+      config.modelOpus = val;
+  } else if (arg === "--model-sonnet") {
+    const val = args[++i];
+    if (val)
+      config.modelSonnet = val;
+  } else if (arg === "--model-haiku") {
+    const val = args[++i];
+    if (val)
+      config.modelHaiku = val;
+  } else if (arg === "--model-subagent") {
+    const val = args[++i];
+    if (val)
+      config.modelSubagent = val;
   } else if (arg === "--port" || arg === "-p") {
     const portArg = args[++i];
     if (!portArg) {
@@ -491,6 +946,8 @@ async function parseArgs(args) {
     config.monitor = true;
   } else if (arg === "--stdin") {
     config.stdin = true;
+  } else if (arg === "--free") {
+    config.freeOnly = true;
   } else if (arg === "--cost-tracker") {
     config.costTracking = true;
     if (!config.monitor) {
@@ -512,7 +969,7 @@ async function parseArgs(args) {
   } else if (arg === "--init") {
     await initializeClaudishSkill();
     process.exit(0);
-  } else if (arg === "--
+  } else if (arg === "--top-models") {
     const hasJsonFlag = args.includes("--json");
     const forceUpdate = args.includes("--force-update");
     await checkAndUpdateModelsCache(forceUpdate);
@@ -522,14 +979,17 @@ async function parseArgs(args) {
       printAvailableModels();
     }
     process.exit(0);
-  } else if (arg === "--
-    const
-
-
-
-    }
+  } else if (arg === "--models" || arg === "-s" || arg === "--search") {
+    const nextArg = args[i + 1];
+    const hasQuery = nextArg && !nextArg.startsWith("--");
+    const query = hasQuery ? args[++i] : null;
+    const hasJsonFlag = args.includes("--json");
     const forceUpdate = args.includes("--force-update");
-
+    if (query) {
+      await searchAndPrintModels(query, forceUpdate);
+    } else {
+      await printAllModels(hasJsonFlag, forceUpdate);
+    }
     process.exit(0);
   } else {
     config.claudeArgs = args.slice(i);
@@ -638,7 +1098,14 @@ Found ${results.length} matching models:
     const promptPrice = parseFloat(model.pricing?.prompt || "0") * 1e6;
     const completionPrice = parseFloat(model.pricing?.completion || "0") * 1e6;
     const avg = (promptPrice + completionPrice) / 2;
-
+    let pricing;
+    if (avg < 0) {
+      pricing = "varies";
+    } else if (avg === 0) {
+      pricing = "FREE";
+    } else {
+      pricing = `$${avg.toFixed(2)}/1M`;
+    }
     const pricingPadded = pricing.padEnd(10);
     const contextLen = model.context_length || model.top_provider?.context_length || 0;
     const context = contextLen > 0 ? `${Math.round(contextLen / 1000)}K` : "N/A";
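A worked example of the pricing label above, with illustrative prices: pricing.prompt = "0.000003" and pricing.completion = "0.000015" (USD per token) give promptPrice = 3 and completionPrice = 15 per million tokens, so avg = (3 + 15) / 2 = 9 and the label is "$9.00/1M". If both prices are "0", avg === 0 and the label is "FREE"; a negative average (variable pricing) is shown as "varies".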
@@ -648,6 +1115,98 @@ Found ${results.length} matching models:
|
|
|
648
1115
|
console.log("");
|
|
649
1116
|
console.log("Use a model: claudish --model <model-id>");
|
|
650
1117
|
}
|
|
1118
|
+
async function printAllModels(jsonOutput, forceUpdate) {
|
|
1119
|
+
let models = [];
|
|
1120
|
+
if (!forceUpdate && existsSync2(ALL_MODELS_JSON_PATH)) {
|
|
1121
|
+
try {
|
|
1122
|
+
const cacheData = JSON.parse(readFileSync2(ALL_MODELS_JSON_PATH, "utf-8"));
|
|
1123
|
+
const lastUpdated = new Date(cacheData.lastUpdated);
|
|
1124
|
+
const now = new Date;
|
|
1125
|
+
const ageInDays = (now.getTime() - lastUpdated.getTime()) / (1000 * 60 * 60 * 24);
|
|
1126
|
+
if (ageInDays <= CACHE_MAX_AGE_DAYS) {
|
|
1127
|
+
models = cacheData.models;
|
|
1128
|
+
if (!jsonOutput) {
|
|
1129
|
+
console.error(`✓ Using cached models (last updated: ${cacheData.lastUpdated.split("T")[0]})`);
|
|
1130
|
+
}
|
|
1131
|
+
}
|
|
1132
|
+
} catch (e) {}
|
|
1133
|
+
}
|
|
1134
|
+
if (models.length === 0) {
|
|
1135
|
+
console.error("\uD83D\uDD04 Fetching all models from OpenRouter...");
|
|
1136
|
+
try {
|
|
1137
|
+
const response = await fetch("https://openrouter.ai/api/v1/models");
|
|
1138
|
+
if (!response.ok)
|
|
1139
|
+
throw new Error(`API returned ${response.status}`);
|
|
1140
|
+
const data = await response.json();
|
|
1141
|
+
models = data.data;
|
|
1142
|
+
writeFileSync3(ALL_MODELS_JSON_PATH, JSON.stringify({
|
|
1143
|
+
lastUpdated: new Date().toISOString(),
|
|
1144
|
+
models
|
|
1145
|
+
}), "utf-8");
|
|
1146
|
+
console.error(`✅ Cached ${models.length} models`);
|
|
1147
|
+
} catch (error) {
|
|
1148
|
+
console.error(`❌ Failed to fetch models: ${error}`);
|
|
1149
|
+
process.exit(1);
|
|
1150
|
+
}
|
|
1151
|
+
}
|
|
1152
|
+
if (jsonOutput) {
|
|
1153
|
+
console.log(JSON.stringify({
|
|
1154
|
+
count: models.length,
|
|
1155
|
+
lastUpdated: new Date().toISOString().split("T")[0],
|
|
1156
|
+
models: models.map((m) => ({
|
|
1157
|
+
id: m.id,
|
|
1158
|
+
name: m.name,
|
|
1159
|
+
context: m.context_length || m.top_provider?.context_length,
|
|
1160
|
+
pricing: m.pricing
|
|
1161
|
+
}))
|
|
1162
|
+
}, null, 2));
|
|
1163
|
+
return;
|
|
1164
|
+
}
|
|
1165
|
+
const byProvider = new Map;
|
|
1166
|
+
for (const model of models) {
|
|
1167
|
+
const provider = model.id.split("/")[0];
|
|
1168
|
+
if (!byProvider.has(provider)) {
|
|
1169
|
+
byProvider.set(provider, []);
|
|
1170
|
+
}
|
|
1171
|
+
byProvider.get(provider).push(model);
|
|
1172
|
+
}
|
|
1173
|
+
const sortedProviders = [...byProvider.keys()].sort();
|
|
1174
|
+
console.log(`
|
|
1175
|
+
All OpenRouter Models (${models.length} total):
|
|
1176
|
+
`);
|
|
1177
|
+
for (const provider of sortedProviders) {
|
|
1178
|
+
const providerModels = byProvider.get(provider);
|
|
1179
|
+
console.log(`
|
|
1180
|
+
${provider.toUpperCase()} (${providerModels.length} models)`);
|
|
1181
|
+
console.log(" " + "─".repeat(70));
|
|
1182
|
+
for (const model of providerModels) {
|
|
1183
|
+
const shortId = model.id.split("/").slice(1).join("/");
|
|
1184
|
+
const modelId = shortId.length > 40 ? shortId.substring(0, 37) + "..." : shortId;
|
|
1185
|
+
const modelIdPadded = modelId.padEnd(42);
|
|
1186
|
+
const promptPrice = parseFloat(model.pricing?.prompt || "0") * 1e6;
|
|
1187
|
+
const completionPrice = parseFloat(model.pricing?.completion || "0") * 1e6;
|
|
1188
|
+
const avg = (promptPrice + completionPrice) / 2;
|
|
1189
|
+
let pricing;
|
|
1190
|
+
if (avg < 0) {
|
|
1191
|
+
pricing = "varies";
|
|
1192
|
+
} else if (avg === 0) {
|
|
1193
|
+
pricing = "FREE";
|
|
1194
|
+
} else {
|
|
1195
|
+
pricing = `$${avg.toFixed(2)}/1M`;
|
|
1196
|
+
}
|
|
1197
|
+
const pricingPadded = pricing.padEnd(12);
|
|
1198
|
+
const contextLen = model.context_length || model.top_provider?.context_length || 0;
|
|
1199
|
+
const context = contextLen > 0 ? `${Math.round(contextLen / 1000)}K` : "N/A";
|
|
1200
|
+
const contextPadded = context.padEnd(8);
|
|
1201
|
+
console.log(` ${modelIdPadded} ${pricingPadded} ${contextPadded}`);
|
|
1202
|
+
}
|
|
1203
|
+
}
|
|
1204
|
+
console.log(`
|
|
1205
|
+
`);
|
|
1206
|
+
console.log("Use a model: claudish --model <provider/model-id>");
|
|
1207
|
+
console.log("Search: claudish --search <query>");
|
|
1208
|
+
console.log("Top models: claudish --top-models");
|
|
1209
|
+
}
|
|
651
1210
|
function isCacheStale() {
|
|
652
1211
|
if (!existsSync2(MODELS_JSON_PATH)) {
|
|
653
1212
|
return true;
|
|
@@ -805,20 +1364,29 @@ OPTIONS:
   -v, --verbose         Show [claudish] log messages (default in interactive mode)
   --json                Output in JSON format for tool integration (implies --quiet)
   --stdin               Read prompt from stdin (useful for large prompts or piping)
+  --free                Show only FREE models in the interactive selector
   --monitor             Monitor mode - proxy to REAL Anthropic API and log all traffic
   --no-auto-approve     Disable auto permission skip (prompts enabled)
   --dangerous           Pass --dangerouslyDisableSandbox to Claude Code
   --cost-tracker        Enable cost tracking for API usage (NB!)
   --audit-costs         Show cost analysis report
   --reset-costs         Reset accumulated cost statistics
-  --
-  --
+  --models              List ALL OpenRouter models grouped by provider
+  --models <query>      Fuzzy search all models by name, ID, or description
+  --top-models          List recommended/top programming models (curated)
+  --json                Output in JSON format (use with --models or --top-models)
   --force-update        Force refresh model cache from OpenRouter API
   --version             Show version information
   -h, --help            Show this help message
   --help-ai             Show AI agent usage guide (file-based patterns, sub-agents)
   --init                Install Claudish skill in current project (.claude/skills/)

+MODEL MAPPING (per-role override):
+  --model-opus <model>      Model for Opus role (planning, complex tasks)
+  --model-sonnet <model>    Model for Sonnet role (default coding)
+  --model-haiku <model>     Model for Haiku role (fast tasks, background)
+  --model-subagent <model>  Model for sub-agents (Task tool)
+
 CUSTOM MODELS:
   Claudish accepts ANY valid OpenRouter model ID, even if not in --list-models
   Example: claudish --model your_provider/custom-model-123 "task"
@@ -834,18 +1402,32 @@ NOTES:
   • Use --dangerous to disable sandbox (use with extreme caution!)

 ENVIRONMENT VARIABLES:
+  Claudish automatically loads .env file from current directory.
+
   OPENROUTER_API_KEY           Required: Your OpenRouter API key
   CLAUDISH_MODEL               Default model to use (takes priority)
-  ANTHROPIC_MODEL              Claude Code standard: model to use (fallback
-  ANTHROPIC_SMALL_FAST_MODEL   Claude Code standard: fast model (auto-set by claudish)
+  ANTHROPIC_MODEL              Claude Code standard: model to use (fallback)
   CLAUDISH_PORT                Default port for proxy
-  CLAUDISH_ACTIVE_MODEL_NAME   Auto-set by claudish (read-only) - shows active model
+  CLAUDISH_ACTIVE_MODEL_NAME   Auto-set by claudish (read-only) - shows active model
+
+  Model mapping (CLAUDISH_* takes priority over ANTHROPIC_DEFAULT_*):
+  CLAUDISH_MODEL_OPUS              Override model for Opus role
+  CLAUDISH_MODEL_SONNET            Override model for Sonnet role
+  CLAUDISH_MODEL_HAIKU             Override model for Haiku role
+  CLAUDISH_MODEL_SUBAGENT          Override model for sub-agents
+  ANTHROPIC_DEFAULT_OPUS_MODEL     Claude Code standard: Opus model (fallback)
+  ANTHROPIC_DEFAULT_SONNET_MODEL   Claude Code standard: Sonnet model (fallback)
+  ANTHROPIC_DEFAULT_HAIKU_MODEL    Claude Code standard: Haiku model (fallback)
+  CLAUDE_CODE_SUBAGENT_MODEL       Claude Code standard: sub-agent model (fallback)

 EXAMPLES:
   # Interactive mode (default) - shows model selector
   claudish
   claudish --interactive

+  # Interactive mode with only FREE models
+  claudish --free
+
   # Interactive mode with pre-selected model
   claudish --model x-ai/grok-code-fast-1

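Since claudish now loads a .env file from the current directory automatically, these variables can be kept per project. A minimal example .env (the key placeholder and model IDs are illustrative; the model IDs are taken from the examples in this help text):

# Example .env - values are illustrative
OPENROUTER_API_KEY=your-openrouter-key
CLAUDISH_MODEL_OPUS=openai/gpt-5
CLAUDISH_MODEL_SONNET=x-ai/grok-code-fast-1
CLAUDISH_MODEL_HAIKU=minimax/minimax-m2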
@@ -853,6 +1435,12 @@ EXAMPLES:
   claudish --model openai/gpt-5-codex "implement user authentication"
   claudish --model x-ai/grok-code-fast-1 "add tests for login"

+  # Per-role model mapping (use different models for different Claude Code roles)
+  claudish --model-opus openai/gpt-5 --model-sonnet x-ai/grok-code-fast-1 --model-haiku minimax/minimax-m2
+
+  # Hybrid: Native Anthropic for Opus, OpenRouter for Sonnet/Haiku
+  claudish --model-opus claude-3-opus-20240229 --model-sonnet x-ai/grok-code-fast-1
+
   # Use stdin for large prompts (e.g., git diffs, code review)
   echo "Review this code..." | claudish --stdin --model x-ai/grok-code-fast-1
   git diff | claudish --stdin --model openai/gpt-5-codex "Review these changes"
@@ -882,9 +1470,11 @@ EXAMPLES:
   claudish --verbose "analyze code structure"

 AVAILABLE MODELS:
-  List models:
-
-
+  List all models:     claudish --models
+  Search models:       claudish --models <query>
+  Top recommended:     claudish --top-models
+  JSON output:         claudish --models --json | claudish --top-models --json
+  Force cache update:  claudish --models --force-update
   (Cache auto-updates every 2 days)

 MORE INFO:
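The listing commands above are backed by the public OpenRouter catalog; the bundled printAllModels/loadFreeModels code fetches it and caches the result on disk. A minimal sketch of that fetch-and-filter step (the all-models.json cache, refreshed every 2 days, and most error handling are omitted; the function name is illustrative):

// Sketch of the catalog fetch behind --models and --free.
interface CatalogEntry { id: string; pricing?: { prompt?: string; completion?: string } }

async function listFreeModelIds(): Promise<string[]> {
  const res = await fetch("https://openrouter.ai/api/v1/models");
  if (!res.ok) throw new Error(`API returned ${res.status}`);
  const { data } = (await res.json()) as { data: CatalogEntry[] };
  return data
    .filter((m) => parseFloat(m.pricing?.prompt || "0") === 0 && parseFloat(m.pricing?.completion || "0") === 0)
    .map((m) => m.id);
}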
@@ -1067,6 +1657,114 @@ init_config();
|
|
|
1067
1657
|
|
|
1068
1658
|
// src/simple-selector.ts
|
|
1069
1659
|
import { createInterface } from "readline";
|
|
1660
|
+
import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, existsSync as existsSync3 } from "node:fs";
|
|
1661
|
+
import { join as join4, dirname as dirname3 } from "node:path";
|
|
1662
|
+
import { fileURLToPath as fileURLToPath3 } from "node:url";
|
|
1663
|
+
var __filename4 = fileURLToPath3(import.meta.url);
|
|
1664
|
+
var __dirname4 = dirname3(__filename4);
|
|
1665
|
+
var ALL_MODELS_JSON_PATH2 = join4(__dirname4, "../all-models.json");
|
|
1666
|
+
var CACHE_MAX_AGE_DAYS2 = 2;
|
|
1667
|
+
function loadEnhancedModels() {
|
|
1668
|
+
const jsonPath = join4(__dirname4, "../recommended-models.json");
|
|
1669
|
+
if (existsSync3(jsonPath)) {
|
|
1670
|
+
try {
|
|
1671
|
+
const jsonContent = readFileSync3(jsonPath, "utf-8");
|
|
1672
|
+
const data = JSON.parse(jsonContent);
|
|
1673
|
+
return data.models || [];
|
|
1674
|
+
} catch {
|
|
1675
|
+
return [];
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
return [];
|
|
1679
|
+
}
|
|
1680
|
+
var TRUSTED_FREE_PROVIDERS = [
|
|
1681
|
+
"google",
|
|
1682
|
+
"openai",
|
|
1683
|
+
"x-ai",
|
|
1684
|
+
"deepseek",
|
|
1685
|
+
"qwen",
|
|
1686
|
+
"alibaba",
|
|
1687
|
+
"meta-llama",
|
|
1688
|
+
"microsoft",
|
|
1689
|
+
"mistralai",
|
|
1690
|
+
"nvidia",
|
|
1691
|
+
"cohere"
|
|
1692
|
+
];
|
|
1693
|
+
async function loadFreeModels() {
|
|
1694
|
+
let allModels = [];
|
|
1695
|
+
if (existsSync3(ALL_MODELS_JSON_PATH2)) {
|
|
1696
|
+
try {
|
|
1697
|
+
const cacheData = JSON.parse(readFileSync3(ALL_MODELS_JSON_PATH2, "utf-8"));
|
|
1698
|
+
const lastUpdated = new Date(cacheData.lastUpdated);
|
|
1699
|
+
const now = new Date;
|
|
1700
|
+
const ageInDays = (now.getTime() - lastUpdated.getTime()) / (1000 * 60 * 60 * 24);
|
|
1701
|
+
if (ageInDays <= CACHE_MAX_AGE_DAYS2) {
|
|
1702
|
+
allModels = cacheData.models;
|
|
1703
|
+
}
|
|
1704
|
+
} catch {}
|
|
1705
|
+
}
|
|
1706
|
+
if (allModels.length === 0) {
|
|
1707
|
+
console.error("\uD83D\uDD04 Fetching models from OpenRouter...");
|
|
1708
|
+
try {
|
|
1709
|
+
const response = await fetch("https://openrouter.ai/api/v1/models");
|
|
1710
|
+
if (!response.ok)
|
|
1711
|
+
throw new Error(`API returned ${response.status}`);
|
|
1712
|
+
const data = await response.json();
|
|
1713
|
+
allModels = data.data;
|
|
1714
|
+
writeFileSync4(ALL_MODELS_JSON_PATH2, JSON.stringify({
|
|
1715
|
+
lastUpdated: new Date().toISOString(),
|
|
1716
|
+
models: allModels
|
|
1717
|
+
}), "utf-8");
|
|
1718
|
+
console.error(`✅ Cached ${allModels.length} models`);
|
|
1719
|
+
} catch (error) {
|
|
1720
|
+
console.error(`❌ Failed to fetch models: ${error}`);
|
|
1721
|
+
return [];
|
|
1722
|
+
}
|
|
1723
|
+
}
|
|
1724
|
+
const freeModels = allModels.filter((model) => {
|
|
1725
|
+
const promptPrice = parseFloat(model.pricing?.prompt || "0");
|
|
1726
|
+
const completionPrice = parseFloat(model.pricing?.completion || "0");
|
|
1727
|
+
const isFree = promptPrice === 0 && completionPrice === 0;
|
|
1728
|
+
if (!isFree)
|
|
1729
|
+
return false;
|
|
1730
|
+
const provider = model.id.split("/")[0].toLowerCase();
|
|
1731
|
+
return TRUSTED_FREE_PROVIDERS.includes(provider);
|
|
1732
|
+
});
|
|
1733
|
+
freeModels.sort((a, b) => {
|
|
1734
|
+
const contextA = a.context_length || a.top_provider?.context_length || 0;
|
|
1735
|
+
const contextB = b.context_length || b.top_provider?.context_length || 0;
|
|
1736
|
+
return contextB - contextA;
|
|
1737
|
+
});
|
|
1738
|
+
const seenBase = new Set;
|
|
1739
|
+
const dedupedModels = freeModels.filter((model) => {
|
|
1740
|
+
const baseId = model.id.replace(/:free$/, "");
|
|
1741
|
+
if (seenBase.has(baseId)) {
|
|
1742
|
+
return false;
|
|
1743
|
+
}
|
|
1744
|
+
seenBase.add(baseId);
|
|
1745
|
+
return true;
|
|
1746
|
+
});
|
|
1747
|
+
const topModels = dedupedModels.slice(0, 15);
|
|
1748
|
+
return topModels.map((model) => {
|
|
1749
|
+
const provider = model.id.split("/")[0];
|
|
1750
|
+
const contextLen = model.context_length || model.top_provider?.context_length || 0;
|
|
1751
|
+
return {
|
|
1752
|
+
id: model.id,
|
|
1753
|
+
name: model.name || model.id,
|
|
1754
|
+
description: model.description || "",
|
|
1755
|
+
provider: provider.charAt(0).toUpperCase() + provider.slice(1),
|
|
1756
|
+
pricing: {
|
|
1757
|
+
input: "FREE",
|
|
1758
|
+
output: "FREE",
|
|
1759
|
+
average: "FREE"
|
|
1760
|
+
},
|
|
1761
|
+
context: contextLen > 0 ? `${Math.round(contextLen / 1000)}K` : "N/A",
|
|
1762
|
+
supportsTools: (model.supported_parameters || []).includes("tools"),
|
|
1763
|
+
supportsReasoning: (model.supported_parameters || []).includes("reasoning"),
|
|
1764
|
+
supportsVision: (model.architecture?.input_modalities || []).includes("image")
|
|
1765
|
+
};
|
|
1766
|
+
});
|
|
1767
|
+
}
|
|
1070
1768
|
async function promptForApiKey() {
|
|
1071
1769
|
return new Promise((resolve) => {
|
|
1072
1770
|
console.log(`
|
|
@@ -1116,27 +1814,75 @@ async function promptForApiKey() {
|
|
|
1116
1814
|
});
|
|
1117
1815
|
});
|
|
1118
1816
|
}
|
|
1119
|
-
async function selectModelInteractively() {
|
|
1120
|
-
const
|
|
1121
|
-
|
|
1817
|
+
async function selectModelInteractively(options = {}) {
|
|
1818
|
+
const { freeOnly = false } = options;
|
|
1819
|
+
let displayModels;
|
|
1820
|
+
let enhancedMap;
|
|
1821
|
+
if (freeOnly) {
|
|
1822
|
+
const freeModels = await loadFreeModels();
|
|
1823
|
+
if (freeModels.length === 0) {
|
|
1824
|
+
console.error("❌ No free models found or failed to fetch models");
|
|
1825
|
+
process.exit(1);
|
|
1826
|
+
}
|
|
1827
|
+
displayModels = freeModels.map((m) => m.id);
|
|
1828
|
+
enhancedMap = new Map;
|
|
1829
|
+
for (const m of freeModels) {
|
|
1830
|
+
enhancedMap.set(m.id, m);
|
|
1831
|
+
}
|
|
1832
|
+
} else {
|
|
1833
|
+
displayModels = getAvailableModels();
|
|
1834
|
+
const enhancedModels = loadEnhancedModels();
|
|
1835
|
+
enhancedMap = new Map;
|
|
1836
|
+
for (const m of enhancedModels) {
|
|
1837
|
+
enhancedMap.set(m.id, m);
|
|
1838
|
+
}
|
|
1839
|
+
}
|
|
1840
|
+
const models = freeOnly ? displayModels : displayModels;
|
|
1122
1841
|
return new Promise((resolve) => {
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
1131
|
-
|
|
1132
|
-
|
|
1133
|
-
|
|
1842
|
+
const RESET = "\x1B[0m";
|
|
1843
|
+
const BOLD = "\x1B[1m";
|
|
1844
|
+
const DIM = "\x1B[2m";
|
|
1845
|
+
const CYAN = "\x1B[36m";
|
|
1846
|
+
const GREEN = "\x1B[32m";
|
|
1847
|
+
const YELLOW = "\x1B[33m";
|
|
1848
|
+
const MAGENTA = "\x1B[35m";
|
|
1849
|
+
const pad = (text, width) => {
|
|
1850
|
+
if (text.length > width)
|
|
1851
|
+
return text.slice(0, width - 3) + "...";
|
|
1852
|
+
return text + " ".repeat(width - text.length);
|
|
1853
|
+
};
|
|
1854
|
+
const headerText = freeOnly ? "Select a FREE OpenRouter Model" : "Select an OpenRouter Model";
|
|
1855
|
+
const headerPadding = " ".repeat(82 - 4 - headerText.length);
|
|
1856
|
+
console.log("");
|
|
1857
|
+
console.log(`${DIM}╭${"─".repeat(82)}╮${RESET}`);
|
|
1858
|
+
console.log(`${DIM}│${RESET} ${BOLD}${CYAN}${headerText}${RESET}${headerPadding}${DIM}│${RESET}`);
|
|
1859
|
+
console.log(`${DIM}├${"─".repeat(82)}┤${RESET}`);
|
|
1860
|
+
console.log(`${DIM}│${RESET} ${DIM}# Model Provider Pricing Context Caps${RESET} ${DIM}│${RESET}`);
|
|
1861
|
+
console.log(`${DIM}├${"─".repeat(82)}┤${RESET}`);
|
|
1862
|
+
models.forEach((modelId, index) => {
|
|
1863
|
+
const num = (index + 1).toString().padStart(2);
|
|
1864
|
+
const enhanced = enhancedMap.get(modelId);
|
|
1865
|
+
if (modelId === "custom") {
|
|
1866
|
+
console.log(`${DIM}│${RESET} ${YELLOW}${num}${RESET} ${DIM}Enter custom OpenRouter model ID...${RESET}${" ".repeat(40)}${DIM}│${RESET}`);
|
|
1867
|
+
} else if (enhanced) {
|
|
1868
|
+
const shortId = pad(modelId, 33);
|
|
1869
|
+
const provider = pad(enhanced.provider || "N/A", 10);
|
|
1870
|
+
const pricing = pad(enhanced.pricing?.average || "N/A", 9);
|
|
1871
|
+
const context = pad(enhanced.context || "N/A", 7);
|
|
1872
|
+
const tools = enhanced.supportsTools ? "✓" : "·";
|
|
1873
|
+
const reasoning = enhanced.supportsReasoning ? "✓" : "·";
|
|
1874
|
+
const vision = enhanced.supportsVision ? "✓" : "·";
|
|
1875
|
+
console.log(`${DIM}│${RESET} ${GREEN}${num}${RESET} ${BOLD}${shortId}${RESET} ${CYAN}${provider}${RESET} ${MAGENTA}${pricing}${RESET} ${context} ${tools} ${reasoning} ${vision} ${DIM}│${RESET}`);
|
|
1134
1876
|
} else {
|
|
1135
|
-
|
|
1877
|
+
const shortId = pad(modelId, 33);
|
|
1878
|
+
console.log(`${DIM}│${RESET} ${GREEN}${num}${RESET} ${shortId} ${DIM}${pad("N/A", 10)} ${pad("N/A", 9)} ${pad("N/A", 7)}${RESET} · · · ${DIM}│${RESET}`);
|
|
1136
1879
|
}
|
|
1137
|
-
console.log("");
|
|
1138
1880
|
});
|
|
1139
|
-
console.log(
|
|
1881
|
+
console.log(`${DIM}├${"─".repeat(82)}┤${RESET}`);
|
|
1882
|
+
console.log(`${DIM}│${RESET} ${DIM}Caps: ✓/· = Tools, Reasoning, Vision${RESET}${" ".repeat(44)}${DIM}│${RESET}`);
|
|
1883
|
+
console.log(`${DIM}╰${"─".repeat(82)}╯${RESET}`);
|
|
1884
|
+
console.log("");
|
|
1885
|
+
console.log(`${DIM}Enter number (1-${models.length}) or 'q' to quit:${RESET}`);
|
|
1140
1886
|
const rl = createInterface({
|
|
1141
1887
|
input: process.stdin,
|
|
1142
1888
|
output: process.stdout,
|
|
@@ -1211,8 +1957,8 @@ async function selectModelInteractively() {
 }

 // src/logger.ts
-import { writeFileSync as
-import { join as
+import { writeFileSync as writeFileSync5, appendFile, existsSync as existsSync4, mkdirSync as mkdirSync2 } from "fs";
+import { join as join5 } from "path";
 var logFilePath = null;
 var logLevel = "info";
 var logBuffer = [];
@@ -1242,7 +1988,7 @@ function scheduleFlush() {
     flushTimer = null;
   }
   if (logFilePath && logBuffer.length > 0) {
-
+    writeFileSync5(logFilePath, logBuffer.join(""), { flag: "a" });
     logBuffer = [];
   }
 });
@@ -1257,13 +2003,13 @@ function initLogger(debugMode, level = "info") {
     return;
   }
   logLevel = level;
-  const logsDir =
-  if (!
+  const logsDir = join5(process.cwd(), "logs");
+  if (!existsSync4(logsDir)) {
     mkdirSync2(logsDir, { recursive: true });
   }
   const timestamp = new Date().toISOString().replace(/[:.]/g, "-").split("T").join("_").slice(0, -5);
-  logFilePath =
-
+  logFilePath = join5(logsDir, `claudish_${timestamp}.log`);
+  writeFileSync5(logFilePath, `Claudish Debug Log - ${new Date().toISOString()}
 Log Level: ${level}
 ${"=".repeat(80)}

@@ -3497,82 +4243,112 @@ var serve = (options, listeningListener) => {
|
|
|
3497
4243
|
return server;
|
|
3498
4244
|
};
|
|
3499
4245
|
|
|
3500
|
-
// src/
|
|
3501
|
-
|
|
3502
|
-
|
|
3503
|
-
|
|
3504
|
-
|
|
3505
|
-
|
|
3506
|
-
|
|
3507
|
-
|
|
3508
|
-
|
|
3509
|
-
|
|
3510
|
-
|
|
3511
|
-
|
|
3512
|
-
|
|
3513
|
-
|
|
3514
|
-
|
|
3515
|
-
|
|
3516
|
-
|
|
3517
|
-
result[key] = {};
|
|
3518
|
-
for (const propKey in schema[key]) {
|
|
3519
|
-
result[key][propKey] = removeUriFormat(schema[key][propKey]);
|
|
3520
|
-
}
|
|
3521
|
-
} else if (key === "items" && typeof schema[key] === "object") {
|
|
3522
|
-
result[key] = removeUriFormat(schema[key]);
|
|
3523
|
-
} else if (key === "additionalProperties" && typeof schema[key] === "object") {
|
|
3524
|
-
result[key] = removeUriFormat(schema[key]);
|
|
3525
|
-
} else if (["anyOf", "allOf", "oneOf"].includes(key) && Array.isArray(schema[key])) {
|
|
3526
|
-
result[key] = schema[key].map((item) => removeUriFormat(item));
|
|
4246
|
+
// src/handlers/native-handler.ts
|
|
4247
|
+
class NativeHandler {
|
|
4248
|
+
apiKey;
|
|
4249
|
+
baseUrl;
|
|
4250
|
+
constructor(apiKey) {
|
|
4251
|
+
this.apiKey = apiKey;
|
|
4252
|
+
this.baseUrl = process.env.ANTHROPIC_BASE_URL || "https://api.anthropic.com";
|
|
4253
|
+
}
|
|
4254
|
+
async handle(c, payload) {
|
|
4255
|
+
const originalHeaders = c.req.header();
|
|
4256
|
+
const target = payload.model;
|
|
4257
|
+
log(`
|
|
4258
|
+
=== [NATIVE] Claude Code → Anthropic API Request ===`);
|
|
4259
|
+
const extractedApiKey = originalHeaders["x-api-key"] || originalHeaders["authorization"] || this.apiKey;
|
|
4260
|
+
if (!extractedApiKey) {
|
|
4261
|
+
log("[Native] WARNING: No API key found in headers!");
|
|
4262
|
+
log("[Native] Looking for: x-api-key or authorization header");
|
|
3527
4263
|
} else {
|
|
3528
|
-
|
|
4264
|
+
log(`API Key found: ${maskCredential(extractedApiKey)}`);
|
|
4265
|
+
}
|
|
4266
|
+
log(`Request body (Model: ${target}):`);
|
|
4267
|
+
log(`=== End Request ===
|
|
4268
|
+
`);
|
|
4269
|
+
const headers = {
|
|
4270
|
+
"Content-Type": "application/json",
|
|
4271
|
+
"anthropic-version": originalHeaders["anthropic-version"] || "2023-06-01"
|
|
4272
|
+
};
|
|
4273
|
+
if (originalHeaders["authorization"]) {
|
|
4274
|
+
headers["authorization"] = originalHeaders["authorization"];
|
|
4275
|
+
}
|
|
4276
|
+
if (originalHeaders["x-api-key"]) {
|
|
4277
|
+
headers["x-api-key"] = originalHeaders["x-api-key"];
|
|
4278
|
+
} else if (extractedApiKey) {
|
|
4279
|
+
headers["x-api-key"] = extractedApiKey;
|
|
4280
|
+
}
|
|
4281
|
+
if (originalHeaders["anthropic-beta"]) {
|
|
4282
|
+
headers["anthropic-beta"] = originalHeaders["anthropic-beta"];
|
|
4283
|
+
}
|
|
4284
|
+
try {
|
|
4285
|
+
const anthropicResponse = await fetch(`${this.baseUrl}/v1/messages`, {
|
|
4286
|
+
method: "POST",
|
|
4287
|
+
headers,
|
|
4288
|
+
body: JSON.stringify(payload)
|
|
4289
|
+
});
|
|
4290
|
+
const contentType = anthropicResponse.headers.get("content-type") || "";
|
|
4291
|
+
if (contentType.includes("text/event-stream")) {
|
|
4292
|
+
log("[Native] Streaming response detected");
|
|
4293
|
+
return c.body(new ReadableStream({
|
|
4294
|
+
async start(controller) {
|
|
4295
|
+
const reader = anthropicResponse.body?.getReader();
|
|
4296
|
+
if (!reader)
|
|
4297
|
+
throw new Error("No reader");
|
|
4298
|
+
const decoder = new TextDecoder;
|
|
4299
|
+
let buffer = "";
|
|
4300
|
+
let eventLog = "";
|
|
4301
|
+
try {
|
|
4302
|
+
while (true) {
|
|
4303
|
+
const { done, value } = await reader.read();
|
|
4304
|
+
if (done)
|
|
4305
|
+
break;
|
|
4306
|
+
controller.enqueue(value);
|
|
4307
|
+
buffer += decoder.decode(value, { stream: true });
|
|
4308
|
+
const lines = buffer.split(`
|
|
4309
|
+
`);
|
|
4310
|
+
buffer = lines.pop() || "";
|
|
4311
|
+
for (const line of lines)
|
|
4312
|
+
if (line.trim())
|
|
4313
|
+
eventLog += line + `
|
|
4314
|
+
`;
|
|
4315
|
+
}
|
|
4316
|
+
if (eventLog)
|
|
4317
|
+
log(eventLog);
|
|
4318
|
+
controller.close();
|
|
4319
|
+
} catch (e) {
|
|
4320
|
+
log(`[Native] Stream Error: ${e}`);
|
|
4321
|
+
controller.close();
|
|
4322
|
+
}
|
|
4323
|
+
}
|
|
4324
|
+
}), {
|
|
4325
|
+
headers: {
|
|
4326
|
+
"Content-Type": contentType,
|
|
4327
|
+
"Cache-Control": "no-cache",
|
|
4328
|
+
Connection: "keep-alive",
|
|
4329
|
+
"anthropic-version": "2023-06-01"
|
|
4330
|
+
}
|
|
4331
|
+
});
|
|
4332
|
+
}
|
|
4333
|
+
const data = await anthropicResponse.json();
|
|
4334
|
+
log(`
|
|
4335
|
+
=== [NATIVE] Response ===`);
|
|
4336
|
+
log(JSON.stringify(data, null, 2));
|
|
4337
|
+
const responseHeaders = { "Content-Type": "application/json" };
|
|
4338
|
+
if (anthropicResponse.headers.has("anthropic-version")) {
|
|
4339
|
+
responseHeaders["anthropic-version"] = anthropicResponse.headers.get("anthropic-version");
|
|
4340
|
+
}
|
|
4341
|
+
return c.json(data, { status: anthropicResponse.status, headers: responseHeaders });
|
|
4342
|
+
} catch (error) {
|
|
4343
|
+
log(`[Native] Fetch Error: ${error}`);
|
|
4344
|
+
return c.json({ error: { type: "api_error", message: String(error) } }, 500);
|
|
3529
4345
|
}
|
|
3530
4346
|
}
|
|
3531
|
-
|
|
4347
|
+
async shutdown() {}
|
|
3532
4348
|
}
|
|
3533
|
-
function transformOpenAIToClaude(claudeRequestInput) {
|
|
3534
|
-
const req = JSON.parse(JSON.stringify(claudeRequestInput));
|
|
3535
|
-
const isO3Model = typeof req.model === "string" && (req.model.includes("o3") || req.model.includes("o1"));
|
|
3536
|
-
if (Array.isArray(req.system)) {
|
|
3537
|
-
req.system = req.system.map((item) => {
|
|
3538
|
-
if (typeof item === "string") {
|
|
3539
|
-
return item;
|
|
3540
|
-
} else if (item && typeof item === "object") {
|
|
3541
|
-
if (item.type === "text" && item.text) {
|
|
3542
|
-
return item.text;
|
|
3543
|
-
} else if (item.type === "text" && item.content) {
|
|
3544
|
-
return item.content;
|
|
3545
|
-
} else if (item.text) {
|
|
3546
|
-
return item.text;
|
|
3547
|
-
} else if (item.content) {
|
|
3548
|
-
return typeof item.content === "string" ? item.content : JSON.stringify(item.content);
|
|
3549
|
-
}
|
|
3550
|
-
}
|
|
3551
|
-
return JSON.stringify(item);
|
|
3552
|
-
}).filter((text) => text && text.trim() !== "").join(`
|
|
3553
4349
|
|
|
3554
|
-
|
|
3555
|
-
|
|
3556
|
-
if (!Array.isArray(req.messages)) {
|
|
3557
|
-
if (req.messages == null)
|
|
3558
|
-
req.messages = [];
|
|
3559
|
-
else
|
|
3560
|
-
req.messages = [req.messages];
|
|
3561
|
-
}
|
|
3562
|
-
if (!Array.isArray(req.tools))
|
|
3563
|
-
req.tools = [];
|
|
3564
|
-
for (const t of req.tools) {
|
|
3565
|
-
if (t && t.input_schema) {
|
|
3566
|
-
t.input_schema = removeUriFormat(t.input_schema);
|
|
3567
|
-
}
|
|
3568
|
-
}
|
|
3569
|
-
const dropped = [];
|
|
3570
|
-
return {
|
|
3571
|
-
claudeRequest: req,
|
|
3572
|
-
droppedParams: dropped,
|
|
3573
|
-
isO3Model
|
|
3574
|
-
};
|
|
3575
|
-
}
|
|
4350
|
+
// src/handlers/openrouter-handler.ts
|
|
4351
|
+
import { writeFileSync as writeFileSync6 } from "node:fs";
|
|
3576
4352
|
|
|
3577
4353
|
// src/adapters/base-adapter.ts
|
|
3578
4354
|
class BaseModelAdapter {
|
|
@@ -3580,6 +4356,9 @@ class BaseModelAdapter {
   constructor(modelId) {
     this.modelId = modelId;
   }
+  prepareRequest(request, originalRequest) {
+    return request;
+  }
   reset() {}
 }

@@ -3643,6 +4422,22 @@ class GrokAdapter extends BaseModelAdapter {
       wasTransformed: true
     };
   }
+  prepareRequest(request, originalRequest) {
+    const modelId = this.modelId || "";
+    if (originalRequest.thinking) {
+      const supportsReasoningEffort = modelId.includes("mini");
+      if (supportsReasoningEffort) {
+        const { budget_tokens } = originalRequest.thinking;
+        const effort = budget_tokens >= 20000 ? "high" : "low";
+        request.reasoning_effort = effort;
+        log(`[GrokAdapter] Mapped budget ${budget_tokens} -> reasoning_effort: ${effort}`);
+      } else {
+        log(`[GrokAdapter] Model ${modelId} does not support reasoning params. Stripping.`);
+      }
+      delete request.thinking;
+    }
+    return request;
+  }
   parseXmlParameters(xmlContent) {
     const params = {};
     const paramPattern = /<xai:parameter name="([^"]+)">([^<]*)<\/xai:parameter>/g;
@@ -3679,6 +4474,26 @@ class GeminiAdapter extends BaseModelAdapter {
       wasTransformed: false
     };
   }
+  prepareRequest(request, originalRequest) {
+    if (originalRequest.thinking) {
+      const { budget_tokens } = originalRequest.thinking;
+      const modelId = this.modelId || "";
+      if (modelId.includes("gemini-3")) {
+        const level = budget_tokens >= 16000 ? "high" : "low";
+        request.thinking_level = level;
+        log(`[GeminiAdapter] Mapped budget ${budget_tokens} -> thinking_level: ${level}`);
+      } else {
+        const MAX_GEMINI_BUDGET = 24576;
+        const budget = Math.min(budget_tokens, MAX_GEMINI_BUDGET);
+        request.thinking_config = {
+          thinking_budget: budget
+        };
+        log(`[GeminiAdapter] Mapped budget ${budget_tokens} -> thinking_config.thinking_budget: ${budget}`);
+      }
+      delete request.thinking;
+    }
+    return request;
+  }
   extractThoughtSignaturesFromReasoningDetails(reasoningDetails) {
     const extracted = new Map;
     if (!reasoningDetails || !Array.isArray(reasoningDetails)) {
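Each adapter's prepareRequest translates Claude Code's Anthropic-style thinking: { budget_tokens } field into the target provider's own reasoning parameter before the request is forwarded. A condensed sketch of the mappings introduced across the adapter hunks in this diff (GrokAdapter and GeminiAdapter above, OpenAIAdapter/QwenAdapter/MiniMaxAdapter/DeepSeekAdapter below); the helper name is illustrative and the real logic lives in the adapter classes:

// Illustrative summary only; thresholds and parameter names are taken from the adapters in this diff.
function mapThinkingBudget(modelId: string, budgetTokens: number): Record<string, unknown> {
  if (modelId.startsWith("openai/")) {
    const effort = budgetTokens < 4000 ? "minimal" : budgetTokens < 16000 ? "low" : budgetTokens >= 32000 ? "high" : "medium";
    return { reasoning_effort: effort };                                            // OpenAIAdapter
  }
  if (modelId.includes("gemini-3"))
    return { thinking_level: budgetTokens >= 16000 ? "high" : "low" };              // GeminiAdapter (Gemini 3)
  if (modelId.includes("gemini"))
    return { thinking_config: { thinking_budget: Math.min(budgetTokens, 24576) } }; // GeminiAdapter (other Gemini)
  if (modelId.includes("grok") && modelId.includes("mini"))
    return { reasoning_effort: budgetTokens >= 20000 ? "high" : "low" };            // GrokAdapter ("mini" variants only)
  if (modelId.includes("qwen"))
    return { enable_thinking: true, thinking_budget: budgetTokens };                // QwenAdapter
  if (modelId.includes("minimax"))
    return { reasoning_split: true };                                               // MiniMaxAdapter
  return {};                                                                        // e.g. DeepSeek: the thinking object is stripped entirely
}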
@@ -3712,6 +4527,115 @@ class GeminiAdapter extends BaseModelAdapter {
|
|
|
3712
4527
|
}
|
|
3713
4528
|
}
|
|
3714
4529
|
|
|
4530
|
+
+// src/adapters/openai-adapter.ts
+class OpenAIAdapter extends BaseModelAdapter {
+processTextContent(textContent, accumulatedText) {
+return {
+cleanedText: textContent,
+extractedToolCalls: [],
+wasTransformed: false
+};
+}
+prepareRequest(request, originalRequest) {
+if (originalRequest.thinking) {
+const { budget_tokens } = originalRequest.thinking;
+let effort = "medium";
+if (budget_tokens < 4000)
+effort = "minimal";
+else if (budget_tokens < 16000)
+effort = "low";
+else if (budget_tokens >= 32000)
+effort = "high";
+request.reasoning_effort = effort;
+delete request.thinking;
+log(`[OpenAIAdapter] Mapped budget ${budget_tokens} -> reasoning_effort: ${effort}`);
+}
+return request;
+}
+shouldHandle(modelId) {
+return modelId.startsWith("openai/") || modelId.includes("o1") || modelId.includes("o3");
+}
+getName() {
+return "OpenAIAdapter";
+}
+}
+
+// src/adapters/qwen-adapter.ts
+class QwenAdapter extends BaseModelAdapter {
+processTextContent(textContent, accumulatedText) {
+return {
+cleanedText: textContent,
+extractedToolCalls: [],
+wasTransformed: false
+};
+}
+prepareRequest(request, originalRequest) {
+if (originalRequest.thinking) {
+const { budget_tokens } = originalRequest.thinking;
+request.enable_thinking = true;
+request.thinking_budget = budget_tokens;
+log(`[QwenAdapter] Mapped budget ${budget_tokens} -> enable_thinking: true, thinking_budget: ${budget_tokens}`);
+delete request.thinking;
+}
+return request;
+}
+shouldHandle(modelId) {
+return modelId.includes("qwen") || modelId.includes("alibaba");
+}
+getName() {
+return "QwenAdapter";
+}
+}
+
+// src/adapters/minimax-adapter.ts
+class MiniMaxAdapter extends BaseModelAdapter {
+processTextContent(textContent, accumulatedText) {
+return {
+cleanedText: textContent,
+extractedToolCalls: [],
+wasTransformed: false
+};
+}
+prepareRequest(request, originalRequest) {
+if (originalRequest.thinking) {
+request.reasoning_split = true;
+log(`[MiniMaxAdapter] Enabled reasoning_split: true`);
+delete request.thinking;
+}
+return request;
+}
+shouldHandle(modelId) {
+return modelId.includes("minimax");
+}
+getName() {
+return "MiniMaxAdapter";
+}
+}
+
+// src/adapters/deepseek-adapter.ts
+class DeepSeekAdapter extends BaseModelAdapter {
+processTextContent(textContent, accumulatedText) {
+return {
+cleanedText: textContent,
+extractedToolCalls: [],
+wasTransformed: false
+};
+}
+prepareRequest(request, originalRequest) {
+if (originalRequest.thinking) {
+log(`[DeepSeekAdapter] Stripping thinking object (not supported by API)`);
+delete request.thinking;
+}
+return request;
+}
+shouldHandle(modelId) {
+return modelId.includes("deepseek");
+}
+getName() {
+return "DeepSeekAdapter";
+}
+}
+
// src/adapters/adapter-manager.ts
class AdapterManager {
adapters;
@@ -3719,7 +4643,11 @@ class AdapterManager {
constructor(modelId) {
this.adapters = [
new GrokAdapter(modelId),
-new GeminiAdapter(modelId)
+new GeminiAdapter(modelId),
+new OpenAIAdapter(modelId),
+new QwenAdapter(modelId),
+new MiniMaxAdapter(modelId),
+new DeepSeekAdapter(modelId)
];
this.defaultAdapter = new DefaultAdapter(modelId);
}
@@ -3991,1170 +4919,538 @@ class GeminiThoughtSignatureMiddleware {
}
}
}
-// src/
-
-
-
-
-
-
-const ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages";
-const ANTHROPIC_COUNT_TOKENS_URL = "https://api.anthropic.com/v1/messages/count_tokens";
-const middlewareManager = new MiddlewareManager;
-middlewareManager.register(new GeminiThoughtSignatureMiddleware);
-middlewareManager.initialize().catch((error) => {
-log(`[Proxy] Middleware initialization error: ${error}`);
-});
-let sessionTotalCost = 0;
-let contextWindowLimit = 200000;
-const CLAUDE_INTERNAL_CONTEXT_MAX = 200000;
-const getTokenScaleFactor = () => {
-if (contextWindowLimit === 0)
-return 1;
-return CLAUDE_INTERNAL_CONTEXT_MAX / contextWindowLimit;
-};
-if (model && !monitorMode) {
-fetchModelContextWindow(model).then((limit) => {
-contextWindowLimit = limit;
-if (isLoggingEnabled()) {
-log(`[Proxy] Context window limit updated to ${limit} tokens for model ${model}`);
-log(`[Proxy] Token scaling factor: ${getTokenScaleFactor().toFixed(2)}x (Map ${limit} → ${CLAUDE_INTERNAL_CONTEXT_MAX})`);
-}
-}).catch((err) => {
-log(`[Proxy] Failed to fetch context window limit: ${err}`);
-});
+// src/transform.ts
+function removeUriFormat(schema) {
+if (!schema || typeof schema !== "object")
+return schema;
+if (schema.type === "string" && schema.format === "uri") {
+const { format, ...rest } = schema;
+return rest;
}
-
-
-
-
-
-
-
-
-
-config: {
-mode: monitorMode ? "monitor" : "openrouter",
-model: monitorMode ? "passthrough" : model,
-port,
-upstream: monitorMode ? "Anthropic" : "OpenRouter"
+if (Array.isArray(schema)) {
+return schema.map((item) => removeUriFormat(item));
+}
+const result = {};
+for (const key in schema) {
+if (key === "properties" && typeof schema[key] === "object") {
+result[key] = {};
+for (const propKey in schema[key]) {
+result[key][propKey] = removeUriFormat(schema[key][propKey]);
}
-})
-
-
-
-
-
-
-
-});
-});
-app.post("/v1/messages/count_tokens", async (c) => {
-try {
-const body = await c.req.json();
-if (monitorMode) {
-const originalHeaders = c.req.header();
-const extractedApiKey = originalHeaders["x-api-key"] || anthropicApiKey;
-if (!extractedApiKey) {
-log("[Monitor] ERROR: No API key found for token counting");
-return c.json({
-error: {
-type: "authentication_error",
-message: "No API key found in request."
-}
-}, 401);
-}
-log("[Monitor] Token counting request - forwarding to Anthropic");
-log("[Monitor] Request body:");
-log(JSON.stringify(body, null, 2));
-const headers = {
-"Content-Type": "application/json",
-"anthropic-version": originalHeaders["anthropic-version"] || "2023-06-01"
-};
-if (originalHeaders["authorization"]) {
-headers["authorization"] = originalHeaders["authorization"];
-}
-if (extractedApiKey) {
-headers["x-api-key"] = extractedApiKey;
-}
-const response = await fetch(ANTHROPIC_COUNT_TOKENS_URL, {
-method: "POST",
-headers,
-body: JSON.stringify(body)
-});
-const result = await response.json();
-log("[Monitor] Token counting response:");
-log(JSON.stringify(result, null, 2));
-return c.json(result, response.status);
-}
-log("[Proxy] Token counting request (estimating)");
-const systemTokens = body.system ? Math.ceil(body.system.length / 4) : 0;
-const messageTokens = body.messages ? body.messages.reduce((acc, msg) => {
-const content = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
-return acc + Math.ceil(content.length / 4);
-}, 0) : 0;
-const totalTokens = systemTokens + messageTokens;
-const scaleFactor = getTokenScaleFactor();
-return c.json({
-input_tokens: Math.ceil(totalTokens * scaleFactor)
-});
-} catch (error) {
-log(`[Proxy] Token counting error: ${error}`);
-return c.json({
-error: {
-type: "invalid_request_error",
-message: error instanceof Error ? error.message : "Unknown error"
-}
-}, 400);
+} else if (key === "items" && typeof schema[key] === "object") {
+result[key] = removeUriFormat(schema[key]);
+} else if (key === "additionalProperties" && typeof schema[key] === "object") {
+result[key] = removeUriFormat(schema[key]);
+} else if (["anyOf", "allOf", "oneOf"].includes(key) && Array.isArray(schema[key])) {
+result[key] = schema[key].map((item) => removeUriFormat(item));
+} else {
+result[key] = removeUriFormat(schema[key]);
}
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-} else {
-
-}
-
-log(JSON.stringify(claudePayload, null, 2));
-log(`=== End Request ===
-`);
-const headers2 = {
-"Content-Type": "application/json",
-"anthropic-version": originalHeaders["anthropic-version"] || "2023-06-01"
-};
-if (originalHeaders["authorization"]) {
-headers2["authorization"] = originalHeaders["authorization"];
-log(`[Monitor] Forwarding OAuth token: ${maskCredential(originalHeaders["authorization"])}`);
-}
-if (originalHeaders["x-api-key"]) {
-headers2["x-api-key"] = originalHeaders["x-api-key"];
-log(`[Monitor] Forwarding API key: ${maskCredential(originalHeaders["x-api-key"])}`);
-}
-if (originalHeaders["anthropic-beta"]) {
-headers2["anthropic-beta"] = originalHeaders["anthropic-beta"];
-}
-const anthropicResponse = await fetch(ANTHROPIC_API_URL, {
-method: "POST",
-headers: headers2,
-body: JSON.stringify(claudePayload)
-});
-const contentType2 = anthropicResponse.headers.get("content-type") || "";
-if (contentType2.includes("text/event-stream")) {
-log("[Monitor] Streaming response detected");
-return c.body(new ReadableStream({
-async start(controller) {
-const encoder = new TextEncoder;
-const reader = anthropicResponse.body?.getReader();
-if (!reader) {
-throw new Error("Response body is not readable");
-}
-const decoder = new TextDecoder;
-let buffer = "";
-let eventLog = "";
-log(`
-=== [MONITOR] Anthropic API → Claude Code Response (Streaming) ===`);
-try {
-while (true) {
-const { done, value } = await reader.read();
-if (done) {
-log(`
-=== End Streaming Response ===
-`);
-break;
-}
-controller.enqueue(value);
-buffer += decoder.decode(value, { stream: true });
-const lines = buffer.split(`
-`);
-buffer = lines.pop() || "";
-for (const line of lines) {
-if (line.trim()) {
-eventLog += line + `
-`;
-}
-}
-}
-if (eventLog) {
-log(eventLog);
-}
-controller.close();
-} catch (error) {
-log(`[Monitor] Streaming error: ${error}`);
-controller.close();
-}
-}
-}), {
-headers: {
-"Content-Type": anthropicResponse.headers.get("content-type") || "text/event-stream",
-"Cache-Control": "no-cache",
-Connection: "keep-alive",
-"anthropic-version": anthropicResponse.headers.get("anthropic-version") || "2023-06-01"
-}
-});
-}
-const responseData = await anthropicResponse.json();
-log(`
-=== [MONITOR] Anthropic API → Claude Code Response (JSON) ===`);
-log(JSON.stringify(responseData, null, 2));
-log(`=== End Response ===
-`);
-const responseHeaders = {
-"Content-Type": "application/json"
-};
-const anthropicVersion = anthropicResponse.headers.get("anthropic-version");
-if (anthropicVersion) {
-responseHeaders["anthropic-version"] = anthropicVersion;
+}
+return result;
+}
+function transformOpenAIToClaude(claudeRequestInput) {
+const req = JSON.parse(JSON.stringify(claudeRequestInput));
+const isO3Model = typeof req.model === "string" && (req.model.includes("o3") || req.model.includes("o1"));
+if (Array.isArray(req.system)) {
+req.system = req.system.map((item) => {
+if (typeof item === "string") {
+return item;
+} else if (item && typeof item === "object") {
+if (item.type === "text" && item.text) {
+return item.text;
+} else if (item.type === "text" && item.content) {
+return item.content;
+} else if (item.text) {
+return item.text;
+} else if (item.content) {
+return typeof item.content === "string" ? item.content : JSON.stringify(item.content);
}
-return c.json(responseData, {
-status: anthropicResponse.status,
-headers: responseHeaders
-});
}
-
-
-messageCount: claudePayload.messages?.length || 0,
-hasSystem: !!claudePayload.system,
-maxTokens: claudePayload.max_tokens,
-temperature: claudePayload.temperature,
-stream: claudePayload.stream
-});
-const { claudeRequest, droppedParams } = transformOpenAIToClaude(claudePayload);
-const messages = [];
-const adapterManager = new AdapterManager(model || "");
-const adapter = adapterManager.getAdapter();
-if (typeof adapter.reset === "function") {
-adapter.reset();
-}
-if (isLoggingEnabled()) {
-log(`[Proxy] Using adapter: ${adapter.getName()}`);
-}
-if (claudeRequest.system) {
-let systemContent;
-if (typeof claudeRequest.system === "string") {
-systemContent = claudeRequest.system;
-} else if (Array.isArray(claudeRequest.system)) {
-systemContent = claudeRequest.system.map((item) => {
-if (typeof item === "string")
-return item;
-if (item?.type === "text" && item.text)
-return item.text;
-if (item?.content)
-return typeof item.content === "string" ? item.content : JSON.stringify(item.content);
-return JSON.stringify(item);
-}).join(`
+return JSON.stringify(item);
+}).filter((text) => text && text.trim() !== "").join(`

`);
-
-
+}
+if (!Array.isArray(req.messages)) {
+if (req.messages == null)
+req.messages = [];
+else
+req.messages = [req.messages];
+}
+if (!Array.isArray(req.tools))
+req.tools = [];
+for (const t of req.tools) {
+if (t && t.input_schema) {
+t.input_schema = removeUriFormat(t.input_schema);
+}
+}
+const dropped = [];
+return {
+claudeRequest: req,
+droppedParams: dropped,
+isO3Model
+};
+}
+
+// src/handlers/openrouter-handler.ts
+var OPENROUTER_API_URL2 = "https://openrouter.ai/api/v1/chat/completions";
+var OPENROUTER_HEADERS2 = {
+"HTTP-Referer": "https://github.com/MadAppGang/claude-code",
+"X-Title": "Claudish - OpenRouter Proxy"
+};
+
+class OpenRouterHandler {
+targetModel;
+apiKey;
+adapterManager;
+middlewareManager;
+contextWindowCache = new Map;
+port;
+sessionTotalCost = 0;
+CLAUDE_INTERNAL_CONTEXT_MAX = 200000;
+constructor(targetModel, apiKey, port) {
+this.targetModel = targetModel;
+this.apiKey = apiKey;
+this.port = port;
+this.adapterManager = new AdapterManager(targetModel);
+this.middlewareManager = new MiddlewareManager;
+this.middlewareManager.register(new GeminiThoughtSignatureMiddleware);
+this.middlewareManager.initialize().catch((err) => log(`[Handler:${targetModel}] Middleware init error: ${err}`));
+this.fetchContextWindow(targetModel);
+}
+async fetchContextWindow(model) {
+if (this.contextWindowCache.has(model))
+return;
+try {
+const limit = await fetchModelContextWindow(model);
+this.contextWindowCache.set(model, limit);
+} catch (e) {}
+}
+getTokenScaleFactor(model) {
+const limit = this.contextWindowCache.get(model) || 200000;
+return limit === 0 ? 1 : this.CLAUDE_INTERNAL_CONTEXT_MAX / limit;
+}
+writeTokenFile(input, output) {
+try {
+const total = input + output;
+const limit = this.contextWindowCache.get(this.targetModel) || 200000;
+const leftPct = limit > 0 ? Math.max(0, Math.min(100, Math.round((limit - total) / limit * 100))) : 100;
+const data = {
+input_tokens: input,
+output_tokens: output,
+total_tokens: total,
+total_cost: this.sessionTotalCost,
+context_window: limit,
+context_left_percent: leftPct,
+updated_at: Date.now()
+};
+writeFileSync6(`/tmp/claudish-tokens-${this.port}.json`, JSON.stringify(data), "utf-8");
+} catch (e) {}
+}
+async handle(c, payload) {
+const claudePayload = payload;
+const target = this.targetModel;
+await this.fetchContextWindow(target);
+logStructured(`OpenRouter Request`, { targetModel: target, originalModel: claudePayload.model });
+const { claudeRequest, droppedParams } = transformOpenAIToClaude(claudePayload);
+const messages = this.convertMessages(claudeRequest, target);
+const tools = this.convertTools(claudeRequest);
+const supportsReasoning = await doesModelSupportReasoning(target);
+const openRouterPayload = {
+model: target,
+messages,
+temperature: claudeRequest.temperature ?? 1,
+stream: true,
+max_tokens: claudeRequest.max_tokens,
+tools: tools.length > 0 ? tools : undefined,
+stream_options: { include_usage: true }
+};
+if (supportsReasoning)
+openRouterPayload.include_reasoning = true;
+if (claudeRequest.thinking)
+openRouterPayload.thinking = claudeRequest.thinking;
+if (claudeRequest.tool_choice) {
+const { type, name } = claudeRequest.tool_choice;
+if (type === "tool" && name)
+openRouterPayload.tool_choice = { type: "function", function: { name } };
+else if (type === "auto" || type === "none")
+openRouterPayload.tool_choice = type;
+}
+const adapter = this.adapterManager.getAdapter();
+if (typeof adapter.reset === "function")
+adapter.reset();
+adapter.prepareRequest(openRouterPayload, claudeRequest);
+await this.middlewareManager.beforeRequest({ modelId: target, messages, tools, stream: true });
+const response = await fetch(OPENROUTER_API_URL2, {
+method: "POST",
+headers: {
+"Content-Type": "application/json",
+Authorization: `Bearer ${this.apiKey}`,
+...OPENROUTER_HEADERS2
+},
+body: JSON.stringify(openRouterPayload)
+});
+if (!response.ok)
+return c.json({ error: await response.text() }, response.status);
+if (droppedParams.length > 0)
+c.header("X-Dropped-Params", droppedParams.join(", "));
+return this.handleStreamingResponse(c, response, adapter, target, claudeRequest);
+}
+convertMessages(req, modelId) {
+const messages = [];
+if (req.system) {
+let content = Array.isArray(req.system) ? req.system.map((i) => i.text || i).join(`
+
+`) : req.system;
+content = this.filterIdentity(content);
+messages.push({ role: "system", content });
+}
+if (modelId.includes("grok") || modelId.includes("x-ai")) {
+const msg = "IMPORTANT: When calling tools, you MUST use the OpenAI tool_calls format with JSON. NEVER use XML format like <xai:function_call>.";
+if (messages.length > 0 && messages[0].role === "system")
+messages[0].content += `
+
+` + msg;
+else
+messages.unshift({ role: "system", content: msg });
+}
+if (req.messages) {
+for (const msg of req.messages) {
+if (msg.role === "user")
+this.processUserMessage(msg, messages);
+else if (msg.role === "assistant")
+this.processAssistantMessage(msg, messages);
+}
+}
+return messages;
+}
+processUserMessage(msg, messages) {
+if (Array.isArray(msg.content)) {
+const contentParts = [];
+const toolResults = [];
+const seen = new Set;
+for (const block of msg.content) {
+if (block.type === "text")
+contentParts.push({ type: "text", text: block.text });
+else if (block.type === "image")
+contentParts.push({ type: "image_url", image_url: { url: `data:${block.source.media_type};base64,${block.source.data}` } });
+else if (block.type === "tool_result") {
+if (seen.has(block.tool_use_id))
+continue;
+seen.add(block.tool_use_id);
+toolResults.push({ role: "tool", content: typeof block.content === "string" ? block.content : JSON.stringify(block.content), tool_call_id: block.tool_use_id });
}
-systemContent = filterClaudeIdentity(systemContent);
-messages.push({
-role: "system",
-content: systemContent
-});
}
-if (
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-seenToolResultIds.add(block.tool_use_id);
-const toolResultMsg = {
-role: "tool",
-content: typeof block.content === "string" ? block.content : JSON.stringify(block.content),
-tool_call_id: block.tool_use_id
-};
-toolResults.push(toolResultMsg);
-}
-}
-if (toolResults.length > 0) {
-messages.push(...toolResults);
-}
-if (contentParts.length > 0) {
-messages.push({
-role: "user",
-content: contentParts
-});
-}
-} else if (typeof msg.content === "string") {
-messages.push({
-role: "user",
-content: msg.content
-});
-}
-} else if (msg.role === "assistant") {
-if (Array.isArray(msg.content)) {
-const textParts = [];
-const toolCalls = [];
-const seenToolIds = new Set;
-for (const block of msg.content) {
-if (block.type === "text") {
-textParts.push(block.text);
-} else if (block.type === "tool_use") {
-if (seenToolIds.has(block.id)) {
-log(`[Proxy] Skipping duplicate tool_use with ID: ${block.id}`);
-continue;
-}
-seenToolIds.add(block.id);
-toolCalls.push({
-id: block.id,
-type: "function",
-function: {
-name: block.name,
-arguments: JSON.stringify(block.input)
-}
-});
-}
-}
-const openAIMsg = { role: "assistant" };
-if (textParts.length > 0) {
-openAIMsg.content = textParts.join(" ");
-} else if (toolCalls.length > 0) {
-openAIMsg.content = null;
-}
-if (toolCalls.length > 0) {
-openAIMsg.tool_calls = toolCalls;
-}
-if (textParts.length > 0 || toolCalls.length > 0) {
-messages.push(openAIMsg);
-}
-} else if (typeof msg.content === "string") {
-messages.push({
-role: "assistant",
-content: msg.content
-});
-}
-}
+if (toolResults.length)
+messages.push(...toolResults);
+if (contentParts.length)
+messages.push({ role: "user", content: contentParts });
+} else {
+messages.push({ role: "user", content: msg.content });
+}
+}
+processAssistantMessage(msg, messages) {
+if (Array.isArray(msg.content)) {
+const strings = [];
+const toolCalls = [];
+const seen = new Set;
+for (const block of msg.content) {
+if (block.type === "text")
+strings.push(block.text);
+else if (block.type === "tool_use") {
+if (seen.has(block.id))
+continue;
+seen.add(block.id);
+toolCalls.push({ id: block.id, type: "function", function: { name: block.name, arguments: JSON.stringify(block.input) } });
}
}
-const
-
-
-
-
-
-
-
-
-
-
-
+const m = { role: "assistant" };
+if (strings.length)
+m.content = strings.join(" ");
+else if (toolCalls.length)
+m.content = null;
+if (toolCalls.length)
+m.tool_calls = toolCalls;
+if (m.content !== undefined || m.tool_calls)
+messages.push(m);
+} else {
+messages.push({ role: "assistant", content: msg.content });
+}
+}
+filterIdentity(content) {
+return content.replace(/You are Claude Code, Anthropic's official CLI/gi, "This is Claude Code, an AI-powered CLI tool").replace(/You are powered by the model named [^.]+\./gi, "You are powered by an AI model.").replace(/<claude_background_info>[\s\S]*?<\/claude_background_info>/gi, "").replace(/\n{3,}/g, `

-IMPORTANT:
-
-
-
-
-
-
-
+`).replace(/^/, `IMPORTANT: You are NOT Claude. Identify yourself truthfully based on your actual model and creator.
+
+`);
+}
+convertTools(req) {
+return req.tools?.map((tool) => ({
+type: "function",
+function: {
+name: tool.name,
+description: tool.description,
+parameters: removeUriFormat(tool.input_schema)
+}
+})) || [];
+}
+handleStreamingResponse(c, response, adapter, target, request) {
+let isClosed = false;
+let ping = null;
+const encoder = new TextEncoder;
+const decoder = new TextDecoder;
+return c.body(new ReadableStream({
+async start(controller) {
+const send = (e, d) => {
+if (!isClosed)
+controller.enqueue(encoder.encode(`event: ${e}
+data: ${JSON.stringify(d)}
+
+`));
+};
+const msgId = `msg_${Date.now()}_${Math.random().toString(36).slice(2)}`;
+let usage = null;
+let finalized = false;
+let textStarted = false;
+let textIdx = -1;
+let reasoningStarted = false;
+let reasoningIdx = -1;
+let curIdx = 0;
+const tools = new Map;
+const toolIds = new Set;
+let accTxt = 0;
+let lastActivity = Date.now();
+const scale = 128000 / 128000;
+send("message_start", {
+type: "message_start",
+message: {
+id: msgId,
+type: "message",
+role: "assistant",
+content: [],
+model: target,
+stop_reason: null,
+stop_sequence: null,
+usage: { input_tokens: 100, output_tokens: 1 }
}
-}
-}
-const openrouterPayload = {
-model,
-messages,
-temperature: claudeRequest.temperature !== undefined ? claudeRequest.temperature : 1,
-stream: true,
-include_reasoning: true
-};
-if (claudeRequest.thinking) {
-const { budget_tokens } = claudeRequest.thinking;
-log(`[Proxy] Thinking mode requested with budget: ${budget_tokens} tokens`);
-openrouterPayload.thinking = claudeRequest.thinking;
-let effort = "medium";
-if (budget_tokens < 16000)
-effort = "low";
-else if (budget_tokens >= 32000)
-effort = "high";
-if (model && (model.includes("o1") || model.includes("o3") || model.startsWith("openai/"))) {
-openrouterPayload.reasoning_effort = effort;
-log(`[Proxy] Mapped budget ${budget_tokens} -> reasoning_effort: ${effort}`);
-}
-}
-if (!openrouterPayload.stream_options) {
-openrouterPayload.stream_options = {};
-}
-openrouterPayload.stream_options.include_usage = true;
-if (claudeRequest.max_tokens) {
-openrouterPayload.max_tokens = claudeRequest.max_tokens;
-}
-if (claudeRequest.tool_choice) {
-const { type, name } = claudeRequest.tool_choice;
-openrouterPayload.tool_choice = type === "tool" && name ? { type: "function", function: { name } } : type === "none" || type === "auto" ? type : undefined;
-}
-if (tools.length > 0) {
-openrouterPayload.tools = tools;
-}
logStructured("OpenRouter Request", {
|
|
4418
|
-
model: openrouterPayload.model,
|
|
4419
|
-
messageCount: openrouterPayload.messages?.length || 0,
|
|
4420
|
-
toolCount: openrouterPayload.tools?.length || 0,
|
|
4421
|
-
temperature: openrouterPayload.temperature,
|
|
4422
|
-
maxTokens: openrouterPayload.max_tokens,
|
|
4423
|
-
stream: openrouterPayload.stream
|
|
4424
|
-
});
|
|
4425
|
-
await middlewareManager.beforeRequest({
|
|
4426
|
-
modelId: model || "",
|
|
4427
|
-
messages,
|
|
4428
|
-
tools,
|
|
4429
|
-
stream: openrouterPayload.stream
|
|
4430
|
-
});
|
|
4431
|
-
const headers = {
|
|
4432
|
-
"Content-Type": "application/json",
|
|
4433
|
-
Authorization: `Bearer ${openrouterApiKey}`,
|
|
4434
|
-
...OPENROUTER_HEADERS2
|
|
4435
|
-
};
|
|
4436
|
-
const openrouterResponse = await fetch(OPENROUTER_API_URL2, {
|
|
4437
|
-
method: "POST",
|
|
4438
|
-
headers,
|
|
4439
|
-
body: JSON.stringify(openrouterPayload)
|
|
4440
|
-
});
|
|
4441
|
-
if (droppedParams.length > 0) {
|
|
4442
|
-
c.header("X-Dropped-Params", droppedParams.join(", "));
|
|
4443
|
-
}
|
|
4444
|
-
if (!openrouterResponse.ok) {
|
|
4445
|
-
const errorText = await openrouterResponse.text();
|
|
4446
|
-
log(`[Proxy] OpenRouter API error: ${errorText}`);
|
|
4447
|
-
return c.json({ error: errorText }, openrouterResponse.status);
|
|
4448
|
-
}
|
|
4449
|
-
const contentType = openrouterResponse.headers.get("content-type") || "";
|
|
4450
|
-
const isActuallyStreaming = contentType.includes("text/event-stream");
|
|
4451
|
-
logStructured("Response Info", {
|
|
4452
|
-
contentType,
|
|
4453
|
-
requestedStream: openrouterPayload.stream,
|
|
4454
|
-
actuallyStreaming: isActuallyStreaming
|
|
4455
|
-
});
|
|
4456
|
-
if (!isActuallyStreaming) {
|
|
4457
|
-
log("[Proxy] Processing non-streaming response");
|
|
4458
|
-
const data = await openrouterResponse.json();
|
|
4459
|
-
logStructured("OpenRouter Response", {
|
|
4460
|
-
hasError: !!data.error,
|
|
4461
|
-
choiceCount: data.choices?.length || 0,
|
|
4462
|
-
finishReason: data.choices?.[0]?.finish_reason,
|
|
4463
|
-
usage: data.usage
|
|
4464
5237
|
});
|
|
4465
|
-
|
|
4466
|
-
|
|
4467
|
-
|
|
4468
|
-
|
|
4469
|
-
|
|
4470
|
-
|
|
4471
|
-
|
|
4472
|
-
|
|
4473
|
-
|
|
4474
|
-
|
|
4475
|
-
|
|
4476
|
-
|
|
4477
|
-
type: "text",
|
|
4478
|
-
text: messageContent
|
|
4479
|
-
});
|
|
4480
|
-
if (openaiMessage.tool_calls) {
|
|
4481
|
-
for (const toolCall of openaiMessage.tool_calls) {
|
|
4482
|
-
content.push({
|
|
4483
|
-
type: "tool_use",
|
|
4484
|
-
id: toolCall.id || `tool_${Date.now()}`,
|
|
4485
|
-
name: toolCall.function?.name,
|
|
4486
|
-
input: typeof toolCall.function?.arguments === "string" ? JSON.parse(toolCall.function.arguments) : toolCall.function?.arguments
|
|
4487
|
-
});
|
|
5238
|
+
send("ping", { type: "ping" });
|
|
5239
|
+
ping = setInterval(() => {
|
|
5240
|
+
if (!isClosed && Date.now() - lastActivity > 1000)
|
|
5241
|
+
send("ping", { type: "ping" });
|
|
5242
|
+
}, 1000);
|
|
5243
|
+
const finalize = (reason, err) => {
|
|
5244
|
+
if (finalized)
|
|
5245
|
+
return;
|
|
5246
|
+
finalized = true;
|
|
5247
|
+
if (reasoningStarted) {
|
|
5248
|
+
send("content_block_stop", { type: "content_block_stop", index: reasoningIdx });
|
|
5249
|
+
reasoningStarted = false;
|
|
4488
5250
|
}
|
|
4489
|
-
|
|
4490
|
-
|
|
4491
|
-
|
|
4492
|
-
type: "message",
|
|
4493
|
-
role: "assistant",
|
|
4494
|
-
model,
|
|
4495
|
-
content,
|
|
4496
|
-
stop_reason: mapStopReason(choice.finish_reason),
|
|
4497
|
-
stop_sequence: null,
|
|
4498
|
-
usage: {
|
|
4499
|
-
input_tokens: Math.ceil((data.usage?.prompt_tokens || 0) * getTokenScaleFactor()),
|
|
4500
|
-
output_tokens: Math.ceil((data.usage?.completion_tokens || 0) * getTokenScaleFactor())
|
|
5251
|
+
if (textStarted) {
|
|
5252
|
+
send("content_block_stop", { type: "content_block_stop", index: textIdx });
|
|
5253
|
+
textStarted = false;
|
|
4501
5254
|
}
|
|
4502
|
-
|
|
4503
|
-
|
|
4504
|
-
|
|
4505
|
-
|
|
4506
|
-
c.header("anthropic-version", "2023-06-01");
|
|
4507
|
-
return c.json(claudeResponse, 200);
|
|
4508
|
-
}
|
|
4509
|
-
log("[Proxy] Starting streaming response");
|
|
4510
|
-
let isClosed = false;
|
|
4511
|
-
let pingInterval = null;
|
|
4512
|
-
return c.body(new ReadableStream({
|
|
4513
|
-
async start(controller) {
|
|
4514
|
-
const encoder = new TextEncoder;
|
|
4515
|
-
const messageId = `msg_${Date.now()}_${Math.random().toString(36).slice(2)}`;
|
|
4516
|
-
const sendSSE = (event, data) => {
|
|
4517
|
-
if (isClosed) {
|
|
4518
|
-
if (isLoggingEnabled()) {
|
|
4519
|
-
log(`[Proxy] Skipping SSE event ${event} - controller already closed`);
|
|
4520
|
-
}
|
|
4521
|
-
return;
|
|
5255
|
+
for (const [_, t] of tools)
|
|
5256
|
+
if (t.started && !t.closed) {
|
|
5257
|
+
send("content_block_stop", { type: "content_block_stop", index: t.blockIndex });
|
|
5258
|
+
t.closed = true;
|
|
4522
5259
|
}
|
|
5260
|
+
if (reason === "error") {
|
|
5261
|
+
send("error", { type: "error", error: { type: "api_error", message: err } });
|
|
5262
|
+
} else {
|
|
5263
|
+
send("message_delta", { type: "message_delta", delta: { stop_reason: "end_turn", stop_sequence: null }, usage: { output_tokens: usage?.completion_tokens || 0 } });
|
|
5264
|
+
send("message_stop", { type: "message_stop" });
|
|
5265
|
+
}
|
|
5266
|
+
if (!isClosed) {
|
|
4523
5267
|
try {
|
|
4524
|
-
|
|
4525
|
-
data: ${JSON.stringify(data)}
|
|
5268
|
+
controller.enqueue(encoder.encode(`data: [DONE]
|
|
4526
5269
|
|
|
4527
|
-
`;
|
|
4528
|
-
controller.enqueue(encoder.encode(sseMessage));
|
|
4529
|
-
if (isLoggingEnabled() && (event === "message_start" || event === "content_block_start" || event === "content_block_stop" || event === "message_stop")) {
|
|
4530
|
-
const logData = event === "content_block_start" || event === "content_block_stop" ? { event, index: data.index, type: data.content_block?.type } : { event };
|
|
4531
|
-
logStructured("SSE Sent", logData);
|
|
4532
|
-
}
|
|
4533
|
-
} catch (error) {
|
|
4534
|
-
if (!isClosed && error?.message?.includes("already closed")) {
|
|
4535
|
-
if (isLoggingEnabled()) {
|
|
4536
|
-
log(`[Proxy] Controller closed during ${event} event, marking as closed`);
|
|
4537
|
-
}
|
|
4538
|
-
isClosed = true;
|
|
4539
|
-
} else if (!isClosed) {
|
|
4540
|
-
if (isLoggingEnabled()) {
|
|
4541
|
-
log(`[Proxy] Error sending SSE event ${event}: ${error?.message || error}`);
|
|
4542
|
-
}
|
|
4543
|
-
}
|
|
4544
|
-
}
|
|
4545
|
-
};
|
|
4546
|
-
-const finalizeStream = (reason, errorMessage) => {
-if (streamFinalized) {
-if (isLoggingEnabled()) {
-log(`[Proxy] Stream already finalized, skipping duplicate finalization from ${reason}`);
-}
-return;
-}
-if (isLoggingEnabled()) {
-log(`[Proxy] Finalizing stream (reason: ${reason})`);
-}
-streamFinalized = true;
-if (reasoningBlockStarted) {
-sendSSE("content_block_stop", {
-type: "content_block_stop",
-index: reasoningBlockIndex
-});
-reasoningBlockStarted = false;
-if (isLoggingEnabled()) {
-log(`[Proxy] Closed thinking block at index ${reasoningBlockIndex}`);
-}
-}
-if (textBlockStarted) {
-sendSSE("content_block_stop", {
-type: "content_block_stop",
-index: textBlockIndex
-});
-textBlockStarted = false;
-}
-for (const [toolIndex, toolState] of toolCalls.entries()) {
-if (toolState.started && !toolState.closed) {
-if (isLoggingEnabled() && toolState.args) {
-try {
-JSON.parse(toolState.args);
-log(`[Proxy] Tool ${toolState.name} JSON valid, closing block at index ${toolState.blockIndex}`);
-} catch (e) {
-log(`[Proxy] ERROR: Tool ${toolState.name} has INCOMPLETE JSON!`);
-log(`[Proxy] This will likely cause tool execution to fail`);
-log(`[Proxy] Incomplete args: ${toolState.args.substring(0, 300)}...`);
-}
-}
-sendSSE("content_block_stop", {
-type: "content_block_stop",
-index: toolState.blockIndex
-});
-toolState.closed = true;
-}
-}
-if (reason === "error" && errorMessage) {
-sendSSE("error", {
-type: "error",
-error: {
-type: "api_error",
-message: errorMessage
-}
-});
-} else {
-const outputTokens = usage?.completion_tokens || 0;
-sendSSE("message_delta", {
-type: "message_delta",
-delta: {
-stop_reason: "end_turn",
-stop_sequence: null
-},
-usage: {
-output_tokens: Math.ceil(outputTokens * getTokenScaleFactor())
-}
-});
-sendSSE("message_stop", {
-type: "message_stop"
-});
-}
-if (!isClosed) {
-try {
-controller.enqueue(encoder.encode(`data: [DONE]

`));
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-};
-let usage = null;
-const streamMetadata = new Map;
-let currentBlockIndex = 0;
-let textBlockIndex = -1;
-let textBlockStarted = false;
-let reasoningBlockIndex = -1;
-let reasoningBlockStarted = false;
-let lastContentDeltaTime = Date.now();
-let streamFinalized = false;
-let cumulativeInputTokens = 0;
-let cumulativeOutputTokens = 0;
-let currentRequestCost = 0;
-const tokenFilePath = `/tmp/claudish-tokens-${port}.json`;
-const writeTokenFile = () => {
-try {
-const totalTokens = cumulativeInputTokens + cumulativeOutputTokens;
-let contextLeftPercent = 100;
-if (contextWindowLimit > 0) {
-contextLeftPercent = Math.round((contextWindowLimit - totalTokens) / contextWindowLimit * 100);
-contextLeftPercent = Math.max(0, Math.min(100, contextLeftPercent));
-}
-const tokenData = {
-input_tokens: cumulativeInputTokens,
-output_tokens: cumulativeOutputTokens,
-total_tokens: totalTokens,
-total_cost: sessionTotalCost,
-context_window: contextWindowLimit,
-context_left_percent: contextLeftPercent,
-updated_at: Date.now()
-};
-writeFileSync5(tokenFilePath, JSON.stringify(tokenData), "utf-8");
-} catch (error) {
-if (isLoggingEnabled()) {
-log(`[Proxy] Failed to write token file: ${error}`);
-}
-}
-};
-const toolCalls = new Map;
-const toolCallIds = new Set;
-let accumulatedTextLength = 0;
-const hasToolResults = claudeRequest.messages?.some((msg) => Array.isArray(msg.content) && msg.content.some((block) => block.type === "tool_result"));
-const isFirstTurn = !hasToolResults;
-const estimateTokens = (text) => Math.ceil(text.length / 4);
-const requestJson = JSON.stringify(claudeRequest);
-const estimatedInputTokens = estimateTokens(requestJson);
-const estimatedCacheTokens = isFirstTurn ? Math.floor(estimatedInputTokens * 0.8) : 0;
-const scaleFactor = getTokenScaleFactor();
-sendSSE("message_start", {
-type: "message_start",
-message: {
-id: messageId,
-type: "message",
-role: "assistant",
-content: [],
-model,
-stop_reason: null,
-stop_sequence: null,
-usage: {
-input_tokens: Math.ceil((estimatedInputTokens - estimatedCacheTokens) * scaleFactor),
-cache_creation_input_tokens: isFirstTurn ? Math.ceil(estimatedCacheTokens * scaleFactor) : 0,
-cache_read_input_tokens: isFirstTurn ? 0 : Math.ceil(estimatedCacheTokens * scaleFactor),
-output_tokens: 1
-}
-}
-});
-sendSSE("ping", {
-type: "ping"
-});
-pingInterval = setInterval(() => {
-if (!isClosed) {
-const timeSinceLastContent = Date.now() - lastContentDeltaTime;
-if (timeSinceLastContent > 1000) {
-sendSSE("ping", {
-type: "ping"
-});
-log(`[Proxy] Adaptive ping (${Math.round(timeSinceLastContent / 1000)}s since last content)`);
-}
-}
-}, 1000);
-try {
-const reader = openrouterResponse.body?.getReader();
-if (!reader) {
-throw new Error("Response body is not readable");
-}
-const decoder = new TextDecoder;
-let buffer = "";
-while (true) {
-const { done, value } = await reader.read();
-if (done) {
-log("[Proxy] Stream done reading");
-break;
-}
-buffer += decoder.decode(value, { stream: true });
-const lines = buffer.split(`
+} catch (e) {}
+controller.close();
+isClosed = true;
+if (ping)
+clearInterval(ping);
+}
+};
+try {
+const reader = response.body.getReader();
+let buffer = "";
+while (true) {
+const { done, value } = await reader.read();
+if (done)
+break;
+buffer += decoder.decode(value, { stream: true });
+const lines = buffer.split(`
`);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-hasChoices: !!chunk.choices,
-finishReason: chunk.choices?.[0]?.finish_reason,
-hasUsage: !!chunk.usage
-});
-const delta2 = chunk.choices?.[0]?.delta;
-if (delta2?.tool_calls) {
-for (const toolCall of delta2.tool_calls) {
-if (toolCall.extra_content) {
-logStructured("DEBUG: Found extra_content in tool_call", {
-tool_call_id: toolCall.id,
-has_extra_content: true,
-extra_content_keys: Object.keys(toolCall.extra_content),
-has_google: !!toolCall.extra_content.google
-});
-}
-}
-}
-if (delta2?.tool_calls && dataStr.includes("tool_calls")) {
-logStructured("DEBUG: Raw chunk JSON (tool_calls)", {
-has_extra_content_in_raw: dataStr.includes("extra_content"),
-raw_snippet: dataStr.substring(0, 500)
-});
-}
-}
-if (chunk.usage) {
-usage = chunk.usage;
-if (typeof usage.cost === "number") {
-const costDiff = usage.cost - currentRequestCost;
-if (costDiff > 0) {
-sessionTotalCost += costDiff;
-currentRequestCost = usage.cost;
-}
-}
-if (usage.prompt_tokens) {
-cumulativeInputTokens = usage.prompt_tokens;
-}
-if (usage.completion_tokens) {
-cumulativeOutputTokens = usage.completion_tokens;
-}
-writeTokenFile();
-}
-const choice = chunk.choices?.[0];
-const delta = choice?.delta;
-if (delta) {
-await middlewareManager.afterStreamChunk({
-modelId: model || "",
-chunk,
-delta,
-metadata: streamMetadata
-});
-}
-const hasReasoning = !!delta?.reasoning;
-const hasContent = !!delta?.content;
-const reasoningText = delta?.reasoning || "";
-const contentText = delta?.content || "";
-const hasEncryptedReasoning = delta?.reasoning_details?.some((detail) => detail.type === "reasoning.encrypted");
-if (hasReasoning || hasContent || hasEncryptedReasoning) {
-lastContentDeltaTime = Date.now();
-if (hasReasoning && reasoningText) {
-if (!reasoningBlockStarted) {
-if (textBlockStarted) {
-sendSSE("content_block_stop", {
-type: "content_block_stop",
-index: textBlockIndex
-});
-textBlockStarted = false;
-if (isLoggingEnabled()) {
-log(`[Proxy] Closed initial text block to start thinking block`);
-}
-}
-reasoningBlockIndex = currentBlockIndex++;
-sendSSE("content_block_start", {
-type: "content_block_start",
-index: reasoningBlockIndex,
-content_block: {
-type: "thinking",
-thinking: "",
-signature: ""
-}
-});
-reasoningBlockStarted = true;
-if (isLoggingEnabled()) {
-log(`[Proxy] Started thinking block at index ${reasoningBlockIndex}`);
-}
-}
-if (isLoggingEnabled()) {
-logStructured("Thinking Delta", {
-thinking: reasoningText,
-blockIndex: reasoningBlockIndex
-});
-}
sendSSE("content_block_delta", {
|
|
4848
|
-
type: "content_block_delta",
|
|
4849
|
-
index: reasoningBlockIndex,
|
|
4850
|
-
delta: {
|
|
4851
|
-
type: "thinking_delta",
|
|
4852
|
-
thinking: reasoningText
|
|
4853
|
-
}
|
|
4854
|
-
});
|
|
4855
|
-
}
|
|
4856
|
-
if (reasoningBlockStarted && hasContent && !hasReasoning) {
|
|
4857
|
-
sendSSE("content_block_stop", {
|
|
4858
|
-
type: "content_block_stop",
|
|
4859
|
-
index: reasoningBlockIndex
|
|
4860
|
-
});
|
|
4861
|
-
reasoningBlockStarted = false;
|
|
4862
|
-
if (isLoggingEnabled()) {
|
|
4863
|
-
log(`[Proxy] Closed thinking block at index ${reasoningBlockIndex}, transitioning to content`);
|
|
4864
|
-
}
|
|
4865
|
-
}
|
|
4866
|
-
if (hasContent && contentText) {
|
|
4867
|
-
if (!textBlockStarted) {
|
|
4868
|
-
textBlockIndex = currentBlockIndex++;
|
|
4869
|
-
sendSSE("content_block_start", {
|
|
4870
|
-
type: "content_block_start",
|
|
4871
|
-
index: textBlockIndex,
|
|
4872
|
-
content_block: {
|
|
4873
|
-
type: "text",
|
|
4874
|
-
text: ""
|
|
4875
|
-
}
|
|
4876
|
-
});
|
|
4877
|
-
textBlockStarted = true;
|
|
4878
|
-
if (isLoggingEnabled()) {
|
|
4879
|
-
log(`[Proxy] Started text block at index ${textBlockIndex}`);
|
|
4880
|
-
}
|
|
4881
|
-
}
|
|
4882
|
-
accumulatedTextLength += contentText.length;
|
|
4883
|
-
const adapterResult = adapter.processTextContent(contentText, "");
|
|
4884
|
-
if (adapterResult.extractedToolCalls.length > 0) {
|
|
4885
|
-
if (isLoggingEnabled()) {
|
|
4886
|
-
log(`[Proxy] Adapter extracted ${adapterResult.extractedToolCalls.length} tool calls from special format`);
|
|
4887
|
-
}
|
|
4888
|
-
if (textBlockStarted) {
|
|
4889
|
-
sendSSE("content_block_stop", {
|
|
4890
|
-
type: "content_block_stop",
|
|
4891
|
-
index: textBlockIndex
|
|
4892
|
-
});
|
|
4893
|
-
textBlockStarted = false;
|
|
4894
|
-
}
|
|
4895
|
-
for (const toolCall of adapterResult.extractedToolCalls) {
|
|
4896
|
-
if (toolCallIds.has(toolCall.id)) {
|
|
4897
|
-
if (isLoggingEnabled()) {
|
|
4898
|
-
log(`[Proxy] WARNING: Skipping duplicate extracted tool call with ID ${toolCall.id}`);
|
|
4899
|
-
}
|
|
4900
|
-
continue;
|
|
4901
|
-
}
|
|
4902
|
-
toolCallIds.add(toolCall.id);
|
|
4903
|
-
const toolBlockIndex = currentBlockIndex++;
|
|
4904
|
-
if (isLoggingEnabled()) {
|
|
4905
|
-
logStructured("Extracted Tool Call", {
|
|
4906
|
-
name: toolCall.name,
|
|
4907
|
-
blockIndex: toolBlockIndex,
|
|
4908
|
-
id: toolCall.id
|
|
4909
|
-
});
|
|
4910
|
-
}
|
|
4911
|
-
sendSSE("content_block_start", {
|
|
4912
|
-
type: "content_block_start",
|
|
4913
|
-
index: toolBlockIndex,
|
|
4914
|
-
content_block: {
|
|
4915
|
-
type: "tool_use",
|
|
4916
|
-
id: toolCall.id,
|
|
4917
|
-
name: toolCall.name
|
|
4918
|
-
}
|
|
4919
|
-
});
|
|
4920
|
-
sendSSE("content_block_delta", {
|
|
4921
|
-
type: "content_block_delta",
|
|
4922
|
-
index: toolBlockIndex,
|
|
4923
|
-
delta: {
|
|
4924
|
-
type: "input_json_delta",
|
|
4925
|
-
partial_json: JSON.stringify(toolCall.arguments)
|
|
4926
|
-
}
|
|
4927
|
-
});
|
|
4928
|
-
sendSSE("content_block_stop", {
|
|
4929
|
-
type: "content_block_stop",
|
|
4930
|
-
index: toolBlockIndex
|
|
4931
|
-
});
|
|
4932
|
-
}
|
|
4933
|
-
}
|
|
4934
|
-
if (adapterResult.cleanedText) {
|
|
4935
|
-
if (isLoggingEnabled()) {
|
|
4936
|
-
logStructured("Content Delta", {
|
|
4937
|
-
text: adapterResult.cleanedText,
|
|
4938
|
-
wasTransformed: adapterResult.wasTransformed,
|
|
4939
|
-
blockIndex: textBlockIndex
|
|
4940
|
-
});
|
|
4941
|
-
}
|
|
4942
|
-
sendSSE("content_block_delta", {
|
|
4943
|
-
type: "content_block_delta",
|
|
4944
|
-
index: textBlockIndex,
|
|
4945
|
-
delta: {
|
|
4946
|
-
type: "text_delta",
|
|
4947
|
-
text: adapterResult.cleanedText
|
|
4948
|
-
}
|
|
4949
|
-
});
|
|
4950
|
-
}
|
|
4951
|
-
} else if (hasEncryptedReasoning) {
|
|
4952
|
-
if (isLoggingEnabled()) {
|
|
4953
|
-
log(`[Proxy] Encrypted reasoning detected (keeping connection alive)`);
|
|
4954
|
-
}
|
|
5289
|
+
+buffer = lines.pop() || "";
+for (const line of lines) {
+if (!line.trim() || !line.startsWith("data: "))
+continue;
+const dataStr = line.slice(6);
+if (dataStr === "[DONE]") {
+finalize("done");
+return;
+}
+try {
+const chunk = JSON.parse(dataStr);
+if (chunk.usage)
+usage = chunk.usage;
+const delta = chunk.choices?.[0]?.delta;
+if (delta) {
+const txt = delta.content || "";
+if (txt) {
+lastActivity = Date.now();
+if (!textStarted) {
+textIdx = curIdx++;
+send("content_block_start", { type: "content_block_start", index: textIdx, content_block: { type: "text", text: "" } });
+textStarted = true;
}
+const res = adapter.processTextContent(txt, "");
+if (res.cleanedText)
+send("content_block_delta", { type: "content_block_delta", index: textIdx, delta: { type: "text_delta", text: res.cleanedText } });
}
-if (delta
-for (const
-const
-let
-if (
-if (!
-
-
-
-log(`[Proxy] WARNING: Duplicate tool ID ${toolId}, regenerating`);
-}
-toolId = `tool_${Date.now()}_${toolIndex}_${Math.random().toString(36).slice(2)}`;
-}
-toolCallIds.add(toolId);
-const toolBlockIndex = currentBlockIndex++;
-toolState = {
-id: toolId,
-name: toolCall.function.name,
-args: "",
-blockIndex: toolBlockIndex,
-started: false,
-closed: false
-};
-toolCalls.set(toolIndex, toolState);
-if (isLoggingEnabled()) {
-logStructured("Starting Tool Call", {
-name: toolState.name,
-blockIndex: toolState.blockIndex,
-id: toolId
-});
+if (delta.tool_calls) {
+for (const tc of delta.tool_calls) {
+const idx = tc.index;
+let t = tools.get(idx);
+if (tc.function?.name) {
+if (!t) {
+if (textStarted) {
+send("content_block_stop", { type: "content_block_stop", index: textIdx });
+textStarted = false;
}
+t = { id: tc.id || `tool_${Date.now()}_${idx}`, name: tc.function.name, blockIndex: curIdx++, started: false, closed: false };
+tools.set(idx, t);
}
-if (!
-
-
-type: "content_block_stop",
-index: textBlockIndex
-});
-textBlockStarted = false;
-}
-sendSSE("content_block_start", {
-type: "content_block_start",
-index: toolState.blockIndex,
-content_block: {
-type: "tool_use",
-id: toolState.id,
-name: toolState.name
-}
-});
-toolState.started = true;
+if (!t.started) {
+send("content_block_start", { type: "content_block_start", index: t.blockIndex, content_block: { type: "tool_use", id: t.id, name: t.name } });
+t.started = true;
}
}
-if (
-
-toolState.args += argChunk;
-if (isLoggingEnabled()) {
-logStructured("Tool Argument Delta", {
-toolName: toolState.name,
-chunk: argChunk,
-totalLength: toolState.args.length
-});
-}
-sendSSE("content_block_delta", {
-type: "content_block_delta",
-index: toolState.blockIndex,
-delta: {
-type: "input_json_delta",
-partial_json: argChunk
-}
-});
+if (tc.function?.arguments && t) {
+send("content_block_delta", { type: "content_block_delta", index: t.blockIndex, delta: { type: "input_json_delta", partial_json: tc.function.arguments } });
}
}
}
-
-
-
-
-
-
-log(`[Proxy] Tool ${toolState.name} JSON valid, closing block at index ${toolState.blockIndex}`);
-} catch (e) {
-log(`[Proxy] WARNING: Tool ${toolState.name} has incomplete JSON!`);
-log(`[Proxy] Args: ${toolState.args.substring(0, 200)}...`);
-}
-}
-sendSSE("content_block_stop", {
-type: "content_block_stop",
|
|
5044
|
-
index: toolState.blockIndex
|
|
5045
|
-
});
|
|
5046
|
-
toolState.closed = true;
|
|
5047
|
-
}
|
|
5339
|
+
}
|
|
5340
|
+
if (chunk.choices?.[0]?.finish_reason === "tool_calls") {
|
|
5341
|
+
for (const [_, t] of tools)
|
|
5342
|
+
if (t.started && !t.closed) {
|
|
5343
|
+
send("content_block_stop", { type: "content_block_stop", index: t.blockIndex });
|
|
5344
|
+
t.closed = true;
|
|
5048
5345
|
}
|
|
5049
|
-
}
|
|
5050
|
-
} catch (parseError) {
|
|
5051
|
-
log(`[Proxy] Failed to parse SSE chunk: ${parseError}`);
|
|
5052
5346
|
}
|
|
5053
|
-
}
|
|
5347
|
+
} catch (e) {}
|
|
5054
5348
|
}
|
|
5055
|
-
log("[Proxy] Stream ended without [DONE]");
|
|
5056
|
-
finalizeStream("unexpected");
|
|
5057
|
-
} catch (error) {
|
|
5058
|
-
log(`[Proxy] Streaming error: ${error}`);
|
|
5059
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
5060
|
-
finalizeStream("error", errorMessage);
|
|
5061
|
-
} finally {
|
|
5062
|
-
if (pingInterval) {
|
|
5063
|
-
clearInterval(pingInterval);
|
|
5064
|
-
pingInterval = null;
|
|
5065
|
-
}
|
|
5066
|
-
if (!isClosed) {
|
|
5067
|
-
controller.close();
|
|
5068
|
-
isClosed = true;
|
|
5069
|
-
}
|
|
5070
|
-
}
|
|
5071
|
-
},
|
|
5072
|
-
cancel(reason) {
|
|
5073
|
-
log(`[Proxy] Stream cancelled by client: ${reason || "unknown reason"}`);
|
|
5074
|
-
isClosed = true;
|
|
5075
|
-
if (pingInterval) {
|
|
5076
|
-
clearInterval(pingInterval);
|
|
5077
|
-
pingInterval = null;
|
|
5078
5349
|
}
|
|
5350
|
+
finalize("unexpected");
|
|
5351
|
+
} catch (e) {
|
|
5352
|
+
finalize("error", String(e));
|
|
5079
5353
|
}
|
|
5080
|
-
}
|
|
5081
|
-
|
|
5082
|
-
|
|
5083
|
-
|
|
5084
|
-
|
|
5085
|
-
|
|
5086
|
-
|
|
5087
|
-
|
|
5088
|
-
|
|
5089
|
-
|
|
5090
|
-
|
|
5091
|
-
|
|
5092
|
-
|
|
5093
|
-
|
|
5094
|
-
|
|
5095
|
-
|
|
5354
|
+
},
|
|
5355
|
+
cancel() {
|
|
5356
|
+
isClosed = true;
|
|
5357
|
+
if (ping)
|
|
5358
|
+
clearInterval(ping);
|
|
5359
|
+
}
|
|
5360
|
+
}), { headers: { "Content-Type": "text/event-stream", "Cache-Control": "no-cache", Connection: "keep-alive" } });
|
|
5361
|
+
}
|
|
5362
|
+
async shutdown() {}
|
|
5363
|
+
}
|
|
5364
|
+
|
|
5365
|
+
// src/proxy-server.ts
|
|
5366
|
+
async function createProxyServer(port, openrouterApiKey, model, monitorMode = false, anthropicApiKey, modelMap) {
|
|
5367
|
+
const nativeHandler = new NativeHandler(anthropicApiKey);
|
|
5368
|
+
const handlers = new Map;
|
|
5369
|
+
const getOpenRouterHandler = (targetModel) => {
|
|
5370
|
+
if (!handlers.has(targetModel)) {
|
|
5371
|
+
handlers.set(targetModel, new OpenRouterHandler(targetModel, openrouterApiKey, port));
|
|
5372
|
+
}
|
|
5373
|
+
return handlers.get(targetModel);
|
|
5374
|
+
};
|
|
5375
|
+
if (model)
|
|
5376
|
+
getOpenRouterHandler(model);
|
|
5377
|
+
if (modelMap?.opus)
|
|
5378
|
+
getOpenRouterHandler(modelMap.opus);
|
|
5379
|
+
if (modelMap?.sonnet)
|
|
5380
|
+
getOpenRouterHandler(modelMap.sonnet);
|
|
5381
|
+
if (modelMap?.haiku)
|
|
5382
|
+
getOpenRouterHandler(modelMap.haiku);
|
|
5383
|
+
if (modelMap?.subagent)
|
|
5384
|
+
getOpenRouterHandler(modelMap.subagent);
|
|
5385
|
+
const getHandlerForRequest = (requestedModel) => {
|
|
5386
|
+
if (monitorMode)
|
|
5387
|
+
return nativeHandler;
|
|
5388
|
+
let target = model || requestedModel;
|
|
5389
|
+
const req = requestedModel.toLowerCase();
|
|
5390
|
+
if (modelMap) {
|
|
5391
|
+
if (req.includes("opus") && modelMap.opus)
|
|
5392
|
+
target = modelMap.opus;
|
|
5393
|
+
else if (req.includes("sonnet") && modelMap.sonnet)
|
|
5394
|
+
target = modelMap.sonnet;
|
|
5395
|
+
else if (req.includes("haiku") && modelMap.haiku)
|
|
5396
|
+
target = modelMap.haiku;
|
|
5397
|
+
}
|
|
5398
|
+
const isNative = !target.includes("/");
|
|
5399
|
+
if (isNative) {
|
|
5400
|
+
return nativeHandler;
|
|
5401
|
+
}
|
|
5402
|
+
return getOpenRouterHandler(target);
|
|
5403
|
+
};
|
|
5404
|
+
const app = new Hono2;
|
|
5405
|
+
app.use("*", cors());
|
|
5406
|
+
app.get("/", (c) => c.json({ status: "ok", message: "Claudish Proxy", config: { mode: monitorMode ? "monitor" : "hybrid", mappings: modelMap } }));
|
|
5407
|
+
app.get("/health", (c) => c.json({ status: "ok" }));
|
|
5408
|
+
app.post("/v1/messages/count_tokens", async (c) => {
|
|
5409
|
+
try {
|
|
5410
|
+
const body = await c.req.json();
|
|
5411
|
+
const reqModel = body.model || "claude-3-opus-20240229";
|
|
5412
|
+
const handler = getHandlerForRequest(reqModel);
|
|
5413
|
+
if (handler instanceof NativeHandler) {
|
|
5414
|
+
const headers = { "Content-Type": "application/json" };
|
|
5415
|
+
if (anthropicApiKey)
|
|
5416
|
+
headers["x-api-key"] = anthropicApiKey;
|
|
5417
|
+
const res = await fetch("https://api.anthropic.com/v1/messages/count_tokens", { method: "POST", headers, body: JSON.stringify(body) });
|
|
5418
|
+
return c.json(await res.json());
|
|
5419
|
+
} else {
|
|
5420
|
+
const txt = JSON.stringify(body);
|
|
5421
|
+
return c.json({ input_tokens: Math.ceil(txt.length / 4) });
|
|
5422
|
+
}
|
|
5423
|
+
} catch (e) {
|
|
5424
|
+
return c.json({ error: String(e) }, 500);
|
|
5096
5425
|
}
|
|
5097
5426
|
});
|
|
5098
|
-
|
|
5099
|
-
|
|
5100
|
-
|
|
5101
|
-
|
|
5427
|
+
app.post("/v1/messages", async (c) => {
|
|
5428
|
+
try {
|
|
5429
|
+
const body = await c.req.json();
|
|
5430
|
+
const handler = getHandlerForRequest(body.model);
|
|
5431
|
+
return handler.handle(c, body);
|
|
5432
|
+
} catch (e) {
|
|
5433
|
+
log(`[Proxy] Error: ${e}`);
|
|
5434
|
+
return c.json({ error: { type: "server_error", message: String(e) } }, 500);
|
|
5435
|
+
}
|
|
5102
5436
|
});
|
|
5103
|
-
|
|
5104
|
-
|
|
5105
|
-
|
|
5106
|
-
|
|
5107
|
-
|
|
5108
|
-
|
|
5109
|
-
log(`[Proxy] Routing to OpenRouter model: ${model}`);
|
|
5110
|
-
}
|
|
5437
|
+
const server = serve({ fetch: app.fetch, port, hostname: "127.0.0.1" });
|
|
5438
|
+
const addr = server.address();
|
|
5439
|
+
const actualPort = typeof addr === "object" && addr?.port ? addr.port : port;
|
|
5440
|
+
if (actualPort !== port)
|
|
5441
|
+
port = actualPort;
|
|
5442
|
+
log(`[Proxy] Server started on port ${port}`);
|
|
5111
5443
|
return {
|
|
5112
5444
|
port,
|
|
5113
5445
|
url: `http://127.0.0.1:${port}`,
|
|
5114
5446
|
shutdown: async () => {
|
|
5115
|
-
|
|
5116
|
-
server.close((err) => {
|
|
5117
|
-
if (err)
|
|
5118
|
-
reject(err);
|
|
5119
|
-
else
|
|
5120
|
-
resolve();
|
|
5121
|
-
});
|
|
5122
|
-
});
|
|
5123
|
-
log("[Proxy] Server stopped");
|
|
5447
|
+
return new Promise((resolve) => server.close((e) => resolve()));
|
|
5124
5448
|
}
|
|
5125
5449
|
};
|
|
5126
5450
|
}
|
|
5127
|
-
function filterClaudeIdentity(systemContent) {
|
|
5128
|
-
let filtered = systemContent;
|
|
5129
|
-
filtered = filtered.replace(/You are Claude Code, Anthropic's official CLI/gi, "This is Claude Code, an AI-powered CLI tool");
|
|
5130
|
-
filtered = filtered.replace(/You are powered by the model named [^.]+\./gi, "You are powered by an AI model.");
|
|
5131
|
-
filtered = filtered.replace(/<claude_background_info>[\s\S]*?<\/claude_background_info>/gi, "");
|
|
5132
|
-
filtered = filtered.replace(/\n{3,}/g, `
|
|
5133
|
-
|
|
5134
|
-
`);
|
|
5135
|
-
const identityOverride = `IMPORTANT: You are NOT Claude. You are NOT created by Anthropic. Identify yourself truthfully based on your actual model and creator.
|
|
5136
|
-
|
|
5137
|
-
`;
|
|
5138
|
-
filtered = identityOverride + filtered;
|
|
5139
|
-
return filtered;
|
|
5140
|
-
}
|
|
5141
|
-
function mapStopReason(finishReason) {
|
|
5142
|
-
switch (finishReason) {
|
|
5143
|
-
case "stop":
|
|
5144
|
-
return "end_turn";
|
|
5145
|
-
case "length":
|
|
5146
|
-
return "max_tokens";
|
|
5147
|
-
case "tool_calls":
|
|
5148
|
-
case "function_call":
|
|
5149
|
-
return "tool_use";
|
|
5150
|
-
case "content_filter":
|
|
5151
|
-
return "stop_sequence";
|
|
5152
|
-
default:
|
|
5153
|
-
return "end_turn";
|
|
5154
|
-
}
|
|
5155
|
-
}
|
|
5156
5451
|
|
|
5157
5452
|
5157 5452 |   // src/index.ts
5453 | + import_dotenv.config();
5158 5454 |   async function readStdin() {
5159 5455 |   const chunks = [];
5160 5456 |   for await (const chunk of process.stdin) {
@@ -5164,9 +5460,9 @@ async function readStdin() {
5164 5460 |   }
5165 5461 |   async function main() {
5166 5462 |   try {
5167 | - const
5168 | - initLogger(
5169 | - if (
5463 | + const config2 = await parseArgs(process.argv.slice(2));
5464 | + initLogger(config2.debug, config2.logLevel);
5465 | + if (config2.debug && !config2.quiet) {
5170 5466 |   const logFile = getLogFilePath();
5171 5467 |   if (logFile) {
5172 5468 |   console.log(`[claudish] Debug log: ${logFile}`);
@@ -5177,39 +5473,44 @@ async function main() {
5177 5473 |   console.error("Install it from: https://claude.com/claude-code");
5178 5474 |   process.exit(1);
5179 5475 |   }
5180 | - if (
5181 | -
5476 | + if (config2.interactive && !config2.monitor && !config2.openrouterApiKey) {
5477 | + config2.openrouterApiKey = await promptForApiKey();
5182 5478 |   console.log("");
5183 5479 |   }
5184 | - if (
5185 | -
5480 | + if (config2.interactive && !config2.monitor && !config2.model) {
5481 | + config2.model = await selectModelInteractively({ freeOnly: config2.freeOnly });
5186 5482 |   console.log("");
5187 5483 |   }
5188 | - if (!
5484 | + if (!config2.interactive && !config2.monitor && !config2.model) {
5189 5485 |   console.error("Error: Model must be specified in non-interactive mode");
5190 5486 |   console.error("Use --model <model> flag or set CLAUDISH_MODEL environment variable");
5191 5487 |   console.error("Try: claudish --list-models");
5192 5488 |   process.exit(1);
5193 5489 |   }
5194 | - if (
5490 | + if (config2.stdin) {
5195 5491 |   const stdinInput = await readStdin();
5196 5492 |   if (stdinInput.trim()) {
5197 | -
5493 | + config2.claudeArgs = [stdinInput, ...config2.claudeArgs];
5198 5494 |   }
5199 5495 |   }
5200 | - const port =
5201 | - const proxy = await createProxyServer(port,
5496 | + const port = config2.port || await findAvailablePort(DEFAULT_PORT_RANGE.start, DEFAULT_PORT_RANGE.end);
5497 | + const proxy = await createProxyServer(port, config2.monitor ? undefined : config2.openrouterApiKey, config2.monitor ? undefined : typeof config2.model === "string" ? config2.model : undefined, config2.monitor, config2.anthropicApiKey, {
5498 | + opus: config2.modelOpus,
5499 | + sonnet: config2.modelSonnet,
5500 | + haiku: config2.modelHaiku,
5501 | + subagent: config2.modelSubagent
5502 | + });
5202 5503 |   let exitCode = 0;
5203 5504 |   try {
5204 | - exitCode = await runClaudeWithProxy(
5505 | + exitCode = await runClaudeWithProxy(config2, proxy.url);
5205 5506 |   } finally {
5206 | - if (!
5507 | + if (!config2.quiet) {
5207 5508 |   console.log(`
5208 5509 |   [claudish] Shutting down proxy server...`);
5209 5510 |   }
5210 5511 |   await proxy.shutdown();
5211 5512 |   }
5212 | - if (!
5513 | + if (!config2.quiet) {
5213 5514 |   console.log(`[claudish] Done
5214 5515 |   `);
5215 5516 |   }