@halilertekin/claude-code-router-config 2.0.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/README.md +3 -2
- package/package.json +3 -1
- package/router/config.js +82 -0
- package/router/format.js +209 -0
- package/router/http.js +55 -0
- package/router/providers.js +53 -0
- package/router/route.js +93 -0
- package/router/server.js +520 -0
- package/router/stream.js +158 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
# Changelog
|
|
2
|
+
|
|
3
|
+
## 2.0.1
|
|
4
|
+
- Include router files in the published npm package.
|
|
5
|
+
|
|
6
|
+
## 2.0.0
|
|
7
|
+
- Unified router service built into this package (no external router dependency).
|
|
8
|
+
- Native CLI lifecycle commands (start/stop/restart/status/code/ui).
|
|
9
|
+
- Streaming translation between Anthropic and OpenAI-style endpoints.
|
|
10
|
+
- Dashboard now served by the router at `/ui`.
|
|
11
|
+
- Default config uses `smart-intent-router.js` and adds HOST/PORT.
|
package/README.md
CHANGED
|
@@ -1,15 +1,16 @@
|
|
|
1
1
|
# Claude Code Router Config - Advanced Multi-Provider Setup
|
|
2
2
|
|
|
3
|
-
🚀 **v2.0.
|
|
3
|
+
🚀 **v2.0.1** - Unified router + config package with z.ai (GLM 4.7) support, advanced CLI tools, analytics, smart routing, and configuration templates!
|
|
4
4
|
|
|
5
5
|
Use Claude Code as a single interface to access multiple AI providers with intelligent routing for optimal performance, cost, and quality.
|
|
6
6
|
|
|
7
|
-
## ✨ New in v2.0.
|
|
7
|
+
## ✨ New in v2.0.1
|
|
8
8
|
- **z.ai Support**: Native integration for GLM-4.7 via z.ai (PPInfra).
|
|
9
9
|
- **Lightweight Mode**: New `ccc` function for zero-dependency routing.
|
|
10
10
|
- **Direct GLM Alias**: Type `glm` to launch Claude Code with GLM-4.7 immediately.
|
|
11
11
|
- **Non-interactive install**: CI-friendly installer flags and env controls.
|
|
12
12
|
- **Unified router**: Built-in router service, no external dependency required.
|
|
13
|
+
- **Packaging fix**: Router files are bundled in the npm package.
|
|
13
14
|
|
|
14
15
|
## 🚀 Setup on Another Machine (Fastest Way)
|
|
15
16
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@halilertekin/claude-code-router-config",
|
|
3
|
-
"version": "2.0.
|
|
3
|
+
"version": "2.0.1",
|
|
4
4
|
"description": "Multi-provider configuration for Claude Code Router with intent-based routing, advanced CLI tools, analytics, and smart routing. Setup OpenAI, Anthropic, Gemini, Qwen, GLM, OpenRouter, and GitHub Copilot with intelligent routing.",
|
|
5
5
|
"main": "install.js",
|
|
6
6
|
"bin": {
|
|
@@ -25,6 +25,7 @@
|
|
|
25
25
|
"bin/",
|
|
26
26
|
"config/",
|
|
27
27
|
"cli/",
|
|
28
|
+
"router/",
|
|
28
29
|
"logging/",
|
|
29
30
|
"templates/",
|
|
30
31
|
"plugins/",
|
|
@@ -35,6 +36,7 @@
|
|
|
35
36
|
"postinstall.js",
|
|
36
37
|
".env.example",
|
|
37
38
|
"docs/",
|
|
39
|
+
"CHANGELOG.md",
|
|
38
40
|
"LICENSE",
|
|
39
41
|
"README.md",
|
|
40
42
|
"NPM_README.md"
|
package/router/config.js
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const os = require('os');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
const DEFAULT_CONFIG_DIR = path.join(os.homedir(), '.claude-code-router');
|
|
6
|
+
const DEFAULT_CONFIG_PATH = path.join(DEFAULT_CONFIG_DIR, 'config.json');
|
|
7
|
+
|
|
8
|
+
/**
 * Expand environment-variable references inside a config string.
 *
 * Supports both `$VAR` and `${VAR}` forms (the original only expanded the
 * brace form for HOME, so `${OPENAI_API_KEY}` was passed through verbatim).
 * `HOME` always resolves via os.homedir() so configs keep working even when
 * the HOME environment variable is unset. Unknown variables expand to ''.
 * Non-string values are returned untouched.
 */
function resolveEnv(value) {
  if (typeof value !== 'string') return value;
  if (!value.includes('$')) return value;

  return value.replace(/\$(?:\{([A-Za-z0-9_]+)\}|([A-Za-z0-9_]+))/g, (match, braced, bare) => {
    const key = braced || bare;
    // HOME is special-cased: os.homedir() is authoritative on every platform.
    if (key === 'HOME') return os.homedir();
    return process.env[key] ?? '';
  });
}
|
|
21
|
+
|
|
22
|
+
// Recursively walk a parsed config structure, expanding environment
// references in every string leaf via resolveEnv. Arrays and plain objects
// are rebuilt; all other values pass through unchanged.
function resolveConfigValue(value) {
  if (Array.isArray(value)) {
    return value.map(resolveConfigValue);
  }
  if (value !== null && typeof value === 'object') {
    const resolved = {};
    for (const [key, entry] of Object.entries(value)) {
      resolved[key] = resolveConfigValue(entry);
    }
    return resolved;
  }
  return resolveEnv(value);
}
|
|
33
|
+
|
|
34
|
+
// Fill in router-wide defaults; any key present in the user config wins.
function applyDefaults(config) {
  return Object.assign(
    {
      HOST: '127.0.0.1',
      PORT: 3456,
      LOG: true,
      LOG_LEVEL: 'info',
      API_TIMEOUT_MS: 300000
    },
    config
  );
}
|
|
48
|
+
|
|
49
|
+
// Read, env-expand, and default-fill the router config.
// Honors an explicit CCR_CONFIG_PATH override; throws when the file is
// missing so callers can surface a clear startup error.
function loadConfig() {
  const configPath = process.env.CCR_CONFIG_PATH || DEFAULT_CONFIG_PATH;
  if (!fs.existsSync(configPath)) {
    throw new Error(`Config file not found at ${configPath}`);
  }

  const parsed = JSON.parse(fs.readFileSync(configPath, 'utf8'));
  // Expand $VAR references first, then fill in any missing defaults.
  return applyDefaults(resolveConfigValue(parsed));
}
|
|
59
|
+
|
|
60
|
+
// Path to the active config file (CCR_CONFIG_PATH overrides the default).
function getConfigPath() {
  const override = process.env.CCR_CONFIG_PATH;
  return override || DEFAULT_CONFIG_PATH;
}

// Directory holding router state such as backups
// (CCR_CONFIG_DIR overrides the default).
function getConfigDir() {
  const override = process.env.CCR_CONFIG_DIR;
  return override || DEFAULT_CONFIG_DIR;
}
|
|
67
|
+
|
|
68
|
+
// Resolve a provider's API key. A value of the form "$NAME" is read from
// the environment (null when unset or empty); any other non-empty value is
// used verbatim. Returns null when no key is configured.
function resolveProviderKey(provider) {
  const raw = provider?.api_key;
  if (!raw) return null;

  if (typeof raw === 'string' && raw.startsWith('$')) {
    return process.env[raw.slice(1)] || null;
  }
  return raw;
}
|
|
76
|
+
|
|
77
|
+
module.exports = {
|
|
78
|
+
loadConfig,
|
|
79
|
+
getConfigPath,
|
|
80
|
+
getConfigDir,
|
|
81
|
+
resolveProviderKey
|
|
82
|
+
};
|
package/router/format.js
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
const crypto = require('crypto');
|
|
2
|
+
|
|
3
|
+
// Render any value as plain text: strings pass through, null/undefined
// become '', everything else is JSON-encoded.
function toText(value) {
  if (value == null) return '';
  return typeof value === 'string' ? value : JSON.stringify(value);
}

// Flatten an Anthropic `system` field (string, content-block array, or
// anything else) into a single newline-joined string.
function normalizeAnthropicSystem(system) {
  if (!system) return '';
  if (typeof system === 'string') return system;
  if (!Array.isArray(system)) return toText(system);

  const parts = [];
  for (const item of system) {
    if (typeof item === 'string') {
      parts.push(item);
    } else if (item?.type === 'text') {
      parts.push(item.text || '');
    } else {
      parts.push(toText(item));
    }
  }
  return parts.join('\n');
}
|
|
23
|
+
|
|
24
|
+
// Convert Anthropic-style messages to OpenAI chat messages by flattening
// content-block arrays into a single string.
function anthropicMessagesToOpenAI(messages = []) {
  return messages.map((message) => {
    const content = message?.content;

    if (typeof content === 'string') {
      return { role: message.role, content };
    }

    if (Array.isArray(content)) {
      let text = '';
      for (const part of content) {
        text += part?.type === 'text' ? part.text || '' : toText(part);
      }
      return { role: message.role, content: text };
    }

    return { role: message.role, content: toText(content) };
  });
}
|
|
41
|
+
|
|
42
|
+
// Convert OpenAI chat messages to Anthropic messages. System messages are
// dropped here (the caller hoists them into the top-level `system` field);
// array content is normalized into text blocks.
function openAIMessagesToAnthropic(messages = []) {
  const converted = [];
  for (const message of messages) {
    if (message.role === 'system') continue;

    const { content } = message;
    if (typeof content === 'string') {
      converted.push({ role: message.role, content });
    } else if (Array.isArray(content)) {
      const blocks = content.map((part) =>
        part?.type === 'text'
          ? { type: 'text', text: part.text || '' }
          : { type: 'text', text: toText(part) }
      );
      converted.push({ role: message.role, content: blocks });
    } else {
      converted.push({ role: message.role, content: toText(content) });
    }
  }
  return converted;
}
|
|
62
|
+
|
|
63
|
+
// Map OpenAI function-tool definitions onto Anthropic tool schemas.
// Entries that are not function tools are dropped.
function openAIToolsToAnthropic(tools = []) {
  const converted = [];
  for (const tool of tools) {
    if (tool?.type !== 'function' || !tool.function) continue;
    converted.push({
      name: tool.function.name,
      description: tool.function.description || '',
      input_schema: tool.function.parameters || {}
    });
  }
  return converted;
}

// Map Anthropic tool schemas onto OpenAI function-tool definitions.
// Unnamed entries are dropped.
function anthropicToolsToOpenAI(tools = []) {
  const converted = [];
  for (const tool of tools) {
    if (!tool?.name) continue;
    converted.push({
      type: 'function',
      function: {
        name: tool.name,
        description: tool.description || '',
        parameters: tool.input_schema || {}
      }
    });
  }
  return converted;
}
|
|
93
|
+
|
|
94
|
+
// Translate an Anthropic /v1/messages request body into an OpenAI
// chat-completions body. The Anthropic `system` field becomes a leading
// system message; optional fields are copied only when present.
function anthropicToOpenAI(body) {
  const systemText = normalizeAnthropicSystem(body.system);
  const converted = anthropicMessagesToOpenAI(body.messages || []);
  const messages = systemText
    ? [{ role: 'system', content: systemText }].concat(converted)
    : converted;

  const request = {
    model: body.model,
    messages,
    stream: Boolean(body.stream),
    temperature: body.temperature,
    max_tokens: body.max_tokens
  };

  if (body.top_p != null) request.top_p = body.top_p;
  if (body.tools) request.tools = anthropicToolsToOpenAI(body.tools);
  // NOTE(review): tool_choice is copied verbatim, but Anthropic and OpenAI
  // use different tool_choice shapes — confirm downstream providers accept it.
  if (body.tool_choice) request.tool_choice = body.tool_choice;
  if (body.metadata) request.metadata = body.metadata;

  return request;
}
|
|
116
|
+
|
|
117
|
+
// Translate an OpenAI chat-completions body into an Anthropic /v1/messages
// body. System messages are concatenated into the `system` field; optional
// fields are copied only when present.
function openAIToAnthropic(body) {
  const allMessages = body.messages || [];
  const systemText = allMessages
    .filter((message) => message.role === 'system')
    .map((message) => toText(message.content))
    .join('\n');

  const request = {
    model: body.model,
    messages: openAIMessagesToAnthropic(allMessages),
    system: systemText || undefined,
    stream: Boolean(body.stream),
    max_tokens: body.max_tokens,
    temperature: body.temperature
  };

  if (body.top_p != null) request.top_p = body.top_p;
  if (body.tools) request.tools = openAIToolsToAnthropic(body.tools);
  // NOTE(review): tool_choice shapes differ between the two APIs; the OpenAI
  // value is passed through unchanged — verify provider tolerance.
  if (body.tool_choice) request.tool_choice = body.tool_choice;
  if (body.metadata) request.metadata = body.metadata;

  return request;
}
|
|
138
|
+
|
|
139
|
+
// Pull the assistant text out of an OpenAI response (or stream chunk):
// the full message content first, then a streaming delta, else ''.
function extractOpenAIText(response) {
  const first = response?.choices?.[0];
  if (!first) return '';
  return first.message?.content || first.delta?.content || '';
}

// Concatenate the text blocks of an Anthropic response; non-text blocks
// are JSON-encoded so nothing is silently lost.
function extractAnthropicText(response) {
  const blocks = response?.content;
  if (!Array.isArray(blocks)) return toText(blocks);

  let text = '';
  for (const block of blocks) {
    text += block?.type === 'text' ? block.text || '' : toText(block);
  }
  return text;
}
|
|
153
|
+
|
|
154
|
+
// Re-shape a non-streaming OpenAI completion into an Anthropic message
// response, carrying over id/model/usage where available.
// Fix: the original copied OpenAI finish_reason strings ('stop', 'length',
// 'tool_calls') straight into Anthropic's stop_reason field, whose expected
// vocabulary is 'end_turn' / 'max_tokens' / 'tool_use'. Common values are
// now translated; unrecognized values pass through unchanged.
function openAIResponseToAnthropic(response, fallbackModel) {
  const stopReasonMap = {
    stop: 'end_turn',
    length: 'max_tokens',
    tool_calls: 'tool_use'
  };
  const finish = response?.choices?.[0]?.finish_reason || 'stop';

  return {
    id: response?.id || `msg_${crypto.randomUUID()}`,
    type: 'message',
    role: 'assistant',
    model: response?.model || fallbackModel,
    content: [{ type: 'text', text: extractOpenAIText(response) }],
    stop_reason: stopReasonMap[finish] || finish,
    usage: {
      input_tokens: response?.usage?.prompt_tokens || 0,
      output_tokens: response?.usage?.completion_tokens || 0
    }
  };
}
|
|
173
|
+
|
|
174
|
+
function anthropicResponseToOpenAI(response, fallbackModel) {
|
|
175
|
+
const text = extractAnthropicText(response);
|
|
176
|
+
const model = response?.model || fallbackModel;
|
|
177
|
+
const created = Math.floor(Date.now() / 1000);
|
|
178
|
+
const finish = response?.stop_reason || 'stop';
|
|
179
|
+
const promptTokens = response?.usage?.input_tokens || 0;
|
|
180
|
+
const completionTokens = response?.usage?.output_tokens || 0;
|
|
181
|
+
|
|
182
|
+
return {
|
|
183
|
+
id: response?.id || `chatcmpl_${crypto.randomUUID()}`,
|
|
184
|
+
object: 'chat.completion',
|
|
185
|
+
created,
|
|
186
|
+
model,
|
|
187
|
+
choices: [
|
|
188
|
+
{
|
|
189
|
+
index: 0,
|
|
190
|
+
message: { role: 'assistant', content: text },
|
|
191
|
+
finish_reason: finish
|
|
192
|
+
}
|
|
193
|
+
],
|
|
194
|
+
usage: {
|
|
195
|
+
prompt_tokens: promptTokens,
|
|
196
|
+
completion_tokens: completionTokens,
|
|
197
|
+
total_tokens: promptTokens + completionTokens
|
|
198
|
+
}
|
|
199
|
+
};
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
module.exports = {
|
|
203
|
+
anthropicToOpenAI,
|
|
204
|
+
openAIToAnthropic,
|
|
205
|
+
openAIResponseToAnthropic,
|
|
206
|
+
anthropicResponseToOpenAI,
|
|
207
|
+
anthropicToolsToOpenAI,
|
|
208
|
+
openAIToolsToAnthropic
|
|
209
|
+
};
|
package/router/http.js
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
const { request } = require('undici');
|
|
2
|
+
|
|
3
|
+
// Drain an async-iterable stream and decode the whole payload as UTF-8.
// Chunks are concatenated as Buffers first so multi-byte sequences split
// across chunk boundaries survive intact.
async function readStream(stream) {
  const buffers = [];
  for await (const piece of stream) {
    buffers.push(Buffer.from(piece));
  }
  return Buffer.concat(buffers).toString('utf8');
}
|
|
10
|
+
|
|
11
|
+
// POST a JSON body and buffer the entire reply.
// The request is aborted if it takes longer than timeoutMs; the timer is
// always cleared, success or failure.
async function sendJsonRequest({ url, headers, body, timeoutMs }) {
  const abort = new AbortController();
  const timer = setTimeout(() => abort.abort(), timeoutMs);

  try {
    const reply = await request(url, {
      method: 'POST',
      headers,
      body: JSON.stringify(body),
      signal: abort.signal
    });

    return {
      statusCode: reply.statusCode,
      headers: reply.headers,
      rawBody: await readStream(reply.body)
    };
  } finally {
    clearTimeout(timer);
  }
}
|
|
33
|
+
|
|
34
|
+
async function sendStreamRequest({ url, headers, body, timeoutMs }) {
|
|
35
|
+
const controller = new AbortController();
|
|
36
|
+
const timeout = setTimeout(() => controller.abort(), timeoutMs);
|
|
37
|
+
|
|
38
|
+
const response = await request(url, {
|
|
39
|
+
method: 'POST',
|
|
40
|
+
headers,
|
|
41
|
+
body: JSON.stringify(body),
|
|
42
|
+
signal: controller.signal
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
response.body.on('end', () => clearTimeout(timeout));
|
|
46
|
+
response.body.on('error', () => clearTimeout(timeout));
|
|
47
|
+
|
|
48
|
+
return response;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
module.exports = {
|
|
52
|
+
sendJsonRequest,
|
|
53
|
+
sendStreamRequest,
|
|
54
|
+
readStream
|
|
55
|
+
};
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
const { resolveProviderKey } = require('./config');
|
|
2
|
+
|
|
3
|
+
// Decide which wire format a provider speaks. Anthropic is inferred from the
// provider name, the base URL, or an "anthropic" transformer entry; anything
// else defaults to the OpenAI chat format.
// Fix: transformer.use entries are not always strings (claude-code-router
// configs also allow [name, options] tuples); the original called
// t.toLowerCase() unconditionally and crashed on such entries.
function inferProviderFormat(provider) {
  const name = (provider?.name || '').toLowerCase();
  if (name === 'anthropic' || name === 'glm') return 'anthropic';

  const baseUrl = provider?.api_base_url || '';
  if (baseUrl.includes('/v1/messages') || baseUrl.includes('/anthropic')) return 'anthropic';

  const transformers = provider?.transformer?.use || [];
  if (transformers.some((t) => typeof t === 'string' && t.toLowerCase() === 'anthropic')) {
    return 'anthropic';
  }

  return 'openai';
}
|
|
14
|
+
|
|
15
|
+
// Build the auth header for a provider request. Defaults to a Bearer
// Authorization header; Gemini uses x-goog-api-key, and a provider may
// override the header name via api_key_header. Returns {} when no key.
function resolveAuthHeader(provider, apiKey) {
  if (!apiKey) return {};

  const providerName = (provider?.name || '').toLowerCase();
  let headerName = provider.api_key_header;
  if (!headerName) {
    headerName = providerName === 'gemini' ? 'x-goog-api-key' : 'authorization';
  }

  if (headerName.toLowerCase() === 'authorization') {
    return { [headerName]: `Bearer ${apiKey}` };
  }
  return { [headerName]: apiKey };
}
|
|
27
|
+
|
|
28
|
+
function buildProviderHeaders(provider) {
|
|
29
|
+
const apiKey = resolveProviderKey(provider);
|
|
30
|
+
const headers = {
|
|
31
|
+
'content-type': 'application/json',
|
|
32
|
+
...resolveAuthHeader(provider, apiKey),
|
|
33
|
+
...(provider.headers || {})
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
if (inferProviderFormat(provider) === 'anthropic') {
|
|
37
|
+
headers['anthropic-version'] = headers['anthropic-version'] || '2023-06-01';
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
if ((provider?.name || '').toLowerCase() === 'openrouter') {
|
|
41
|
+
const referer = provider.referer || process.env.OPENROUTER_REFERRER;
|
|
42
|
+
const title = provider.app_name || process.env.OPENROUTER_APP_NAME;
|
|
43
|
+
if (referer) headers['http-referer'] = referer;
|
|
44
|
+
if (title) headers['x-title'] = title;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
return { headers, apiKey };
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
module.exports = {
|
|
51
|
+
inferProviderFormat,
|
|
52
|
+
buildProviderHeaders
|
|
53
|
+
};
|
package/router/route.js
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
|
|
3
|
+
// Rough token estimate for routing decisions: ~4 characters per token,
// never less than 1. Non-string message content is measured as JSON.
function estimateTokens(messages = [], system) {
  let systemText;
  if (Array.isArray(system)) {
    systemText = system
      .map((entry) => (typeof entry === 'string' ? entry : entry?.text || ''))
      .join(' ');
  } else {
    systemText = system || '';
  }

  const messageText = (messages || [])
    .map((m) => (typeof m.content === 'string' ? m.content : JSON.stringify(m.content || '')))
    .join(' ');

  const combined = `${systemText} ${messageText}`;
  return Math.max(1, Math.ceil(combined.length / 4));
}
|
|
13
|
+
|
|
14
|
+
// Split an explicit "provider,model" request model into its parts.
// Returns null unless both halves are non-empty; commas inside the model
// name are preserved.
function parseExplicitRoute(model) {
  if (!model) return null;
  const separator = model.indexOf(',');
  if (separator === -1) return null;

  const provider = model.slice(0, separator);
  const modelName = model.slice(separator + 1);
  if (!provider || !modelName) return null;
  return { provider, model: modelName };
}

// Look for a <CCR-SUBAGENT-MODEL> override embedded in the system prompt
// (string or content-block array). Returns the trimmed route or null.
function extractSubagentModel(system) {
  let haystack;
  if (Array.isArray(system)) {
    haystack = system.map((item) => (item?.text ? item.text : '')).join(' ');
  } else {
    haystack = system || '';
  }

  const found = haystack.match(/<CCR-SUBAGENT-MODEL>(.*?)<\/CCR-SUBAGENT-MODEL>/s);
  return found ? found[1].trim() : null;
}
|
|
28
|
+
|
|
29
|
+
// Load a user-provided router module, busting the require cache so edits
// take effect without restarting. Best-effort by design: any failure
// (missing file, bad module) yields null and the static rules take over.
function loadCustomRouter(routerPath) {
  if (!routerPath) return null;
  try {
    if (!fs.existsSync(routerPath)) return null;
    const resolved = require.resolve(routerPath);
    delete require.cache[resolved];
    return require(routerPath);
  } catch {
    return null;
  }
}
|
|
39
|
+
|
|
40
|
+
async function resolveRoute(req, config) {
|
|
41
|
+
if (!req?.body) return config.Router?.default || null;
|
|
42
|
+
|
|
43
|
+
const explicit = parseExplicitRoute(req.body.model);
|
|
44
|
+
if (explicit) {
|
|
45
|
+
return `${explicit.provider},${explicit.model}`;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
const tokenCount = estimateTokens(req.body.messages || [], req.body.system);
|
|
49
|
+
req.tokenCount = tokenCount;
|
|
50
|
+
|
|
51
|
+
const subagentModel = extractSubagentModel(req.body.system);
|
|
52
|
+
if (subagentModel) {
|
|
53
|
+
return subagentModel;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
const customRouter = loadCustomRouter(config.CUSTOM_ROUTER_PATH);
|
|
57
|
+
if (customRouter) {
|
|
58
|
+
try {
|
|
59
|
+
const route = await customRouter(req, config);
|
|
60
|
+
if (route) return route;
|
|
61
|
+
} catch {
|
|
62
|
+
// Fall through to defaults
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
const routerConfig = config.Router || {};
|
|
67
|
+
const longContextThreshold = routerConfig.longContextThreshold || 60000;
|
|
68
|
+
if (tokenCount > longContextThreshold && routerConfig.longContext) {
|
|
69
|
+
return routerConfig.longContext;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
if (req.body.thinking && routerConfig.think) {
|
|
73
|
+
return routerConfig.think;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
if (Array.isArray(req.body.tools) && req.body.tools.some((tool) => {
|
|
77
|
+
return typeof tool?.type === 'string' && tool.type.startsWith('web_search');
|
|
78
|
+
}) && routerConfig.webSearch) {
|
|
79
|
+
return routerConfig.webSearch;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
if (req.body.model?.includes('haiku') && routerConfig.background) {
|
|
83
|
+
return routerConfig.background;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
return routerConfig.default || null;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
module.exports = {
|
|
90
|
+
resolveRoute,
|
|
91
|
+
parseExplicitRoute,
|
|
92
|
+
estimateTokens
|
|
93
|
+
};
|
package/router/server.js
ADDED
|
@@ -0,0 +1,520 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
const express = require('express');
|
|
4
|
+
const cors = require('cors');
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
const os = require('os');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
|
|
9
|
+
const { loadConfig, getConfigPath, getConfigDir } = require('./config');
|
|
10
|
+
const { resolveRoute, estimateTokens } = require('./route');
|
|
11
|
+
const {
|
|
12
|
+
anthropicToOpenAI,
|
|
13
|
+
openAIToAnthropic,
|
|
14
|
+
openAIResponseToAnthropic,
|
|
15
|
+
anthropicResponseToOpenAI
|
|
16
|
+
} = require('./format');
|
|
17
|
+
const { inferProviderFormat, buildProviderHeaders } = require('./providers');
|
|
18
|
+
const { sendJsonRequest, sendStreamRequest } = require('./http');
|
|
19
|
+
const { readStream } = require('./http');
|
|
20
|
+
const {
|
|
21
|
+
pipeStream,
|
|
22
|
+
streamOpenAIToAnthropic,
|
|
23
|
+
streamAnthropicToOpenAI
|
|
24
|
+
} = require('./stream');
|
|
25
|
+
const { recordRequest, calculateCost, getTodayAnalytics, getAnalyticsSummary, exportAnalytics } = require('../cli/analytics');
|
|
26
|
+
const { logger } = require('../logging/enhanced-logger');
|
|
27
|
+
const { HealthMonitor } = require('../logging/health-monitor');
|
|
28
|
+
|
|
29
|
+
// Classify the inbound request by endpoint: the Anthropic /v1/messages
// family versus everything else (treated as OpenAI-style).
function getRequestFormat(req) {
  if (req.path.startsWith('/v1/messages')) return 'anthropic';
  return 'openai';
}
|
|
32
|
+
|
|
33
|
+
// Resolve a "provider,model" route against the configured providers.
// Provider lookup is case-insensitive; a missing model part falls back to
// the provider's first configured model. Returns null when unresolvable.
function pickProvider(route, config) {
  if (!route) return null;

  const separator = route.indexOf(',');
  const providerName = separator === -1 ? route : route.slice(0, separator);
  const requestedModel = separator === -1 ? '' : route.slice(separator + 1).trim();

  const wanted = providerName.toLowerCase();
  const provider = config.Providers.find((p) => p.name.toLowerCase() === wanted);
  if (!provider) return null;

  return { provider, model: requestedModel || provider.models?.[0] };
}
|
|
46
|
+
|
|
47
|
+
// Anthropic's /v1/messages requires max_tokens; supply a conservative
// default when the caller omitted it. Mutates and returns the same body.
function ensureAnthropicDefaults(body) {
  const hasLimit = Boolean(body.max_tokens);
  if (!hasLimit) {
    body.max_tokens = 1024;
  }
  return body;
}
|
|
53
|
+
|
|
54
|
+
// Normalize usage counters from either wire format into a common
// { inputTokens, outputTokens } shape (zeros when usage is absent).
function extractUsage(providerFormat, response) {
  const usage = response?.usage;
  if (providerFormat === 'anthropic') {
    return {
      inputTokens: usage?.input_tokens || 0,
      outputTokens: usage?.output_tokens || 0
    };
  }
  return {
    inputTokens: usage?.prompt_tokens || 0,
    outputTokens: usage?.completion_tokens || 0
  };
}
|
|
66
|
+
|
|
67
|
+
// Assemble candidate routes to retry after a failure, in priority order:
// configured Router.fallbacks, then the default route, then the first model
// of every provider (deduplicated). The failed route is always excluded.
function buildFallbackRoutes(config, usedRoute) {
  const candidates = [];

  for (const route of config.Router?.fallbacks || []) {
    if (route && route !== usedRoute) candidates.push(route);
  }

  const defaultRoute = config.Router?.default;
  if (defaultRoute && defaultRoute !== usedRoute) {
    candidates.push(defaultRoute);
  }

  for (const provider of config.Providers) {
    const firstModel = provider.models?.[0];
    if (!firstModel) continue;
    const route = `${provider.name},${firstModel}`;
    if (route !== usedRoute && !candidates.includes(route)) {
      candidates.push(route);
    }
  }

  return candidates;
}
|
|
88
|
+
|
|
89
|
+
// Core proxy handler shared by /v1/messages and /v1/chat/completions:
// resolve a route, translate the body between Anthropic and OpenAI formats
// as needed, dispatch (streaming or buffered), retry fallbacks on transport
// failure, record analytics, and translate the response back.
async function handleProxy(req, res) {
  // Reload config on every request so edits apply without a restart.
  let config;
  try {
    config = loadConfig();
  } catch (error) {
    res.status(500).json({ error: error.message });
    return;
  }

  const requestFormat = getRequestFormat(req);
  const route = await resolveRoute(req, config);
  let selection = pickProvider(route, config);
  if (!selection) {
    res.status(400).json({ error: 'No matching provider found' });
    return;
  }

  let providerFormat = inferProviderFormat(selection.provider);
  let { headers, apiKey } = buildProviderHeaders(selection.provider);
  if (!apiKey) {
    res.status(400).json({ error: `Missing API key for provider ${selection.provider.name}` });
    return;
  }

  // Translate the body only when client and provider formats differ.
  let outgoingBody;
  if (requestFormat === providerFormat) {
    outgoingBody = { ...req.body };
  } else {
    outgoingBody = providerFormat === 'openai'
      ? anthropicToOpenAI(req.body)
      : openAIToAnthropic(req.body);
  }
  outgoingBody.model = selection.model;
  if (providerFormat === 'anthropic') {
    ensureAnthropicDefaults(outgoingBody);
  }

  const timeoutMs = config.API_TIMEOUT_MS || 300000;
  const shouldStream = Boolean(outgoingBody.stream);
  const start = Date.now();

  // Dispatch to a provider URL with the right transport for this request.
  const dispatch = (url, requestHeaders) => {
    const options = { url, headers: requestHeaders, body: outgoingBody, timeoutMs };
    return shouldStream ? sendStreamRequest(options) : sendJsonRequest(options);
  };

  let upstream;
  let usedRoute = route;
  try {
    upstream = await dispatch(selection.provider.api_base_url, headers);
  } catch {
    // Primary provider unreachable: walk the fallback routes in order.
    // NOTE(review): the body is not re-translated when a fallback provider
    // speaks a different format than the primary — confirm this is intended.
    for (const fallback of buildFallbackRoutes(config, usedRoute)) {
      const candidate = pickProvider(fallback, config);
      if (!candidate) continue;

      const candidateFormat = inferProviderFormat(candidate.provider);
      const candidateHeaders = buildProviderHeaders(candidate.provider).headers;

      outgoingBody.model = candidate.model;
      if (candidateFormat === 'anthropic') {
        ensureAnthropicDefaults(outgoingBody);
      }

      try {
        upstream = await dispatch(candidate.provider.api_base_url, candidateHeaders);
        usedRoute = fallback;
        selection = candidate;
        providerFormat = candidateFormat;
        headers = candidateHeaders;
        break;
      } catch {
        // Try the next fallback.
      }
    }
  }

  if (!upstream) {
    res.status(502).json({ error: 'Failed to reach provider' });
    return;
  }

  if (shouldStream) {
    if (upstream.statusCode >= 400) {
      const errorText = await readStream(upstream.body);
      res.status(upstream.statusCode).json({ error: errorText || 'Upstream error' });
      return;
    }

    res.status(200);
    if (requestFormat === providerFormat) {
      await pipeStream(res, upstream.body, upstream.headers);
    } else if (providerFormat === 'openai') {
      // anthropic client <- openai provider
      await streamOpenAIToAnthropic(upstream.body, res, { model: selection.model });
    } else {
      // openai client <- anthropic provider
      await streamAnthropicToOpenAI(upstream.body, res, { model: selection.model });
    }
    return;
  }

  if (upstream.statusCode >= 400) {
    res.status(upstream.statusCode).send(upstream.rawBody || { error: 'Upstream error' });
    return;
  }

  let responsePayload;
  try {
    responsePayload = JSON.parse(upstream.rawBody || '{}');
  } catch {
    responsePayload = { error: 'Invalid upstream response' };
  }

  // Account for the request before any format translation.
  const latency = Date.now() - start;
  const usage = extractUsage(providerFormat, responsePayload);
  const cost = calculateCost(selection.provider.name, selection.model, usage.inputTokens, usage.outputTokens);
  logger.logRequest(selection.provider.name, selection.model, usage.inputTokens, usage.outputTokens, latency, true, cost);
  recordRequest(selection.provider.name, selection.model, usage.inputTokens, usage.outputTokens, latency, true);

  if (requestFormat !== providerFormat) {
    responsePayload = requestFormat === 'anthropic'
      ? openAIResponseToAnthropic(responsePayload, selection.model)
      : anthropicResponseToOpenAI(responsePayload, selection.model);
  }

  res.status(200).json(responsePayload);
}
|
|
252
|
+
|
|
253
|
+
// Registers every HTTP endpoint on the Express app: the proxy routes that
// Anthropic/OpenAI-style clients talk to, plus the dashboard's JSON API.
function setupApi(app) {
  // Directory the router writes its log files into; the /api/logs endpoints
  // are confined to files inside this directory.
  const logDir = path.join(os.homedir(), '.claude-code-router', 'logs');

  // Resolve a client-supplied log file path, refusing anything that escapes
  // logDir (path traversal such as `?file=../../etc/passwd`). Returns the
  // default app.log when no file is given, or null when the requested path
  // lies outside the log directory.
  function resolveLogFile(requested) {
    if (!requested) {
      return path.join(logDir, 'app.log');
    }
    const resolved = path.resolve(logDir, String(requested));
    if (resolved !== logDir && !resolved.startsWith(logDir + path.sep)) {
      return null;
    }
    return resolved;
  }

  // Liveness probe (unauthenticated; see the auth middleware in startServer).
  app.get('/health', (req, res) => {
    res.json({ status: 'ok', timestamp: new Date().toISOString() });
  });

  // Anthropic-style proxy endpoints.
  app.post('/v1/messages', handleProxy);
  app.post('/v1/messages/count_tokens', (req, res) => {
    const tokenCount = estimateTokens(req.body?.messages || [], req.body?.system);
    res.json({ input_tokens: tokenCount });
  });

  // OpenAI-style proxy endpoint.
  app.post('/v1/chat/completions', handleProxy);

  // OpenAI Responses-style endpoint: normalize `input` into `messages`
  // before handing the request to the shared proxy handler.
  app.post('/v1/responses', (req, res) => {
    req.body = req.body || {};
    if (!req.body.messages && req.body.input) {
      if (typeof req.body.input === 'string') {
        req.body.messages = [{ role: 'user', content: req.body.input }];
      } else if (Array.isArray(req.body.input)) {
        req.body.messages = req.body.input;
      } else {
        req.body.messages = [];
      }
    }
    handleProxy(req, res);
  });

  // Basic process/runtime information for the dashboard.
  app.get('/api/status', (req, res) => {
    res.json({
      success: true,
      data: {
        uptime: process.uptime(),
        timestamp: new Date().toISOString(),
        version: require('../package.json').version,
        nodeVersion: process.version,
        platform: os.platform(),
        arch: os.arch()
      }
    });
  });

  // Read the active configuration.
  app.get('/api/config', (req, res) => {
    try {
      const config = loadConfig();
      res.json({ success: true, data: config });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Overwrite the configuration, keeping a timestamped backup of the
  // previous file so a bad save can be recovered.
  app.post('/api/config', (req, res) => {
    try {
      const configPath = getConfigPath();
      const backupDir = path.join(getConfigDir(), 'backups');
      if (!fs.existsSync(backupDir)) fs.mkdirSync(backupDir, { recursive: true });
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
      const backupPath = path.join(backupDir, `config-${timestamp}.json`);
      fs.copyFileSync(configPath, backupPath);

      fs.writeFileSync(configPath, JSON.stringify(req.body, null, 2));
      res.json({ success: true, message: 'Config saved', backup: backupPath });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // List the bundled configuration templates shipped with the package.
  app.get('/api/config/templates', (req, res) => {
    try {
      const templatesDir = path.join(__dirname, '..', 'templates');
      const files = fs.readdirSync(templatesDir).filter((file) => file.endsWith('.json'));
      const templates = files.map((file) => {
        const content = JSON.parse(fs.readFileSync(path.join(templatesDir, file), 'utf8'));
        return {
          name: file.replace('.json', ''),
          description: content._description || content._comment || '',
          config: content
        };
      });
      res.json({ success: true, data: templates });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Configured upstream providers.
  app.get('/api/providers', (req, res) => {
    try {
      const config = loadConfig();
      res.json({ success: true, data: config.Providers || [] });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Usage analytics for the current day.
  app.get('/api/analytics/today', (req, res) => {
    try {
      res.json({ success: true, data: getTodayAnalytics() });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Aggregated analytics over a period (default: one week).
  app.get('/api/analytics/summary', (req, res) => {
    try {
      const period = req.query.period || 'week';
      res.json({ success: true, data: getAnalyticsSummary(period) });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Export analytics to a temporary file, stream it to the client, then
  // delete it (best-effort) once the download completes or fails.
  app.get('/api/analytics/export', (req, res) => {
    try {
      const format = req.query.format || 'json';
      const period = req.query.period || 'all';
      const filepath = exportAnalytics(format, period);
      res.download(filepath, (err) => {
        // Only send an error body if nothing has been written yet;
        // res.status() would throw after headers are flushed.
        if (err && !res.headersSent) {
          res.status(500).json({ success: false, error: 'Export failed' });
        }
        fs.unlink(filepath, () => {});
      });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Live health check of every configured provider.
  app.get('/api/health/providers', async (req, res) => {
    try {
      const config = loadConfig();
      const monitor = new HealthMonitor({ enabled: true });
      config.Providers.forEach((provider) => monitor.addProvider(provider.name, provider));
      const results = await monitor.checkAllProviders();
      res.json({ success: true, data: Object.values(results) });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Local system health (memory/CPU) for the dashboard.
  app.get('/api/health/system', (req, res) => {
    const monitor = new HealthMonitor({ enabled: false });
    res.json({
      success: true,
      data: {
        uptime: process.uptime(),
        memory: process.memoryUsage(),
        cpu: monitor.getCPUUsage(),
        nodeVersion: process.version
      }
    });
  });

  // Enumerate .log/.json files in the log directory with size/mtime.
  app.get('/api/logs/files', (req, res) => {
    try {
      const logFiles = [];
      if (fs.existsSync(logDir)) {
        const files = fs.readdirSync(logDir);
        for (const file of files) {
          if (file.endsWith('.log') || file.endsWith('.json')) {
            const filePath = path.join(logDir, file);
            const stats = fs.statSync(filePath);
            logFiles.push({
              name: file,
              path: filePath,
              size: stats.size,
              lastModified: stats.mtime.toISOString()
            });
          }
        }
      }
      res.json({ success: true, data: logFiles });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Read a log file as an array of non-empty lines. The `file` query
  // parameter is confined to the log directory (see resolveLogFile);
  // previously the raw value was passed straight to fs.readFileSync,
  // allowing arbitrary file reads.
  app.get('/api/logs', (req, res) => {
    try {
      const filePath = resolveLogFile(req.query.file);
      if (!filePath) {
        res.status(400).json({ success: false, error: 'Invalid log file path' });
        return;
      }
      if (!fs.existsSync(filePath)) {
        res.json({ success: true, data: [] });
        return;
      }
      const logContent = fs.readFileSync(filePath, 'utf8');
      const logLines = logContent.split('\n').filter((line) => line.trim());
      res.json({ success: true, data: logLines });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });

  // Truncate a log file (same path confinement as the GET endpoint;
  // previously this could truncate any file the process could write).
  app.delete('/api/logs', (req, res) => {
    try {
      const filePath = resolveLogFile(req.query.file);
      if (!filePath) {
        res.status(400).json({ success: false, error: 'Invalid log file path' });
        return;
      }
      if (fs.existsSync(filePath)) {
        fs.writeFileSync(filePath, '', 'utf8');
      }
      res.json({ success: true, message: 'Logs cleared' });
    } catch (error) {
      res.status(500).json({ success: false, error: error.message });
    }
  });
}
|
|
459
|
+
|
|
460
|
+
// Serves the bundled web dashboard under /ui and redirects the root URL
// there so a browser hitting the router lands on the dashboard.
function setupUi(app) {
  const dashboardRoot = path.join(__dirname, '..', 'web-dashboard', 'public');
  app.use('/ui', express.static(dashboardRoot));
  app.get('/', (req, res) => {
    res.redirect('/ui');
  });
}
|
|
465
|
+
|
|
466
|
+
// Builds the Express app (CORS, JSON body parsing, optional API-key auth),
// wires up the API and dashboard UI routes, and starts listening on the
// HOST/PORT from the config (default 127.0.0.1:3456).
function startServer() {
  const app = express();
  app.use(cors());
  app.use(express.json({ limit: '50mb' }));

  // Constant-time secret comparison so the API key cannot be recovered via
  // response-timing differences (`!==` short-circuits on the first
  // mismatching character). Length is still observable, which is the
  // conventional trade-off with timingSafeEqual.
  const crypto = require('crypto');
  const safeEqual = (a, b) => {
    const left = Buffer.from(String(a));
    const right = Buffer.from(String(b));
    return left.length === right.length && crypto.timingSafeEqual(left, right);
  };

  // Optional API-key auth middleware. The health probe and the dashboard UI
  // stay open; everything else requires the configured APIKEY (if any).
  app.use((req, res, next) => {
    if (req.path === '/health' || req.path.startsWith('/ui')) {
      return next();
    }
    // Config is re-read per request so key changes apply without a restart;
    // if the config is unreadable we fail open, matching prior behavior.
    let config;
    try {
      config = loadConfig();
    } catch {
      return next();
    }
    if (!config.APIKEY) {
      return next();
    }
    const authHeader = req.headers.authorization || req.headers['x-api-key'];
    if (!authHeader) {
      res.status(401).json({ error: 'API key missing' });
      return;
    }
    // Accept both `Authorization: Bearer <key>` and a bare `x-api-key`.
    const token = authHeader.startsWith('Bearer ')
      ? authHeader.slice('Bearer '.length)
      : authHeader;
    if (!safeEqual(token, config.APIKEY)) {
      res.status(401).json({ error: 'Invalid API key' });
      return;
    }
    next();
  });

  setupApi(app);
  setupUi(app);

  const config = loadConfig();
  logger.level = config.LOG_LEVEL || logger.level;
  logger.enableConsole = config.LOG !== false;

  const host = config.HOST || '127.0.0.1';
  const port = parseInt(config.PORT || 3456, 10);

  app.listen(port, host, () => {
    logger.info(`Router server listening on http://${host}:${port}`);
  });
}
|
|
513
|
+
|
|
514
|
+
// Allow this file to be run directly (`node router/server.js`) as well as
// being required by the CLI, which imports startServer.
if (require.main === module) {
  startServer();
}

module.exports = {
  startServer
};
|
package/router/stream.js
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
const crypto = require('crypto');
|
|
2
|
+
const { createParser } = require('eventsource-parser');
|
|
3
|
+
|
|
4
|
+
// Configure the response for Server-Sent Events: SSE content type,
// proxy-safe cache directives, and a kept-alive connection, then flush the
// headers immediately (when the response object supports it) so the client
// starts receiving the stream right away.
function setStreamHeaders(res) {
  const sseHeaders = {
    'Content-Type': 'text/event-stream; charset=utf-8',
    'Cache-Control': 'no-cache, no-transform',
    Connection: 'keep-alive'
  };
  for (const [name, value] of Object.entries(sseHeaders)) {
    res.setHeader(name, value);
  }
  res.flushHeaders?.();
}
|
|
10
|
+
|
|
11
|
+
// Write one Anthropic-style SSE frame: a named `event:` line followed by a
// JSON-serialized `data:` line and the blank line that terminates the frame.
function writeAnthropicEvent(res, event, data) {
  const frame = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
  res.write(frame);
}
|
|
15
|
+
|
|
16
|
+
// Write one OpenAI-style SSE frame: an unnamed `data:` line holding the
// JSON-serialized payload, terminated by a blank line.
function writeOpenAIEvent(res, data) {
  const serialized = JSON.stringify(data);
  res.write(`data: ${serialized}\n\n`);
}
|
|
19
|
+
|
|
20
|
+
// Pipe an upstream byte stream straight through to the client without any
// translation. When the upstream supplied a content-type we forward it;
// otherwise we fall back to SSE headers.
//
// The response is now closed in a `finally` block: previously, if the
// upstream iterator threw mid-stream, res.end() was never called and the
// client connection hung open.
async function pipeStream(res, upstream, headers) {
  const contentType = headers?.['content-type'];
  if (contentType) {
    res.setHeader('Content-Type', contentType);
  } else {
    setStreamHeaders(res);
  }
  try {
    for await (const chunk of upstream) {
      res.write(chunk);
    }
  } finally {
    res.end();
  }
}
|
|
31
|
+
|
|
32
|
+
// Translate an OpenAI-style SSE stream (`choices[0].delta.content` chunks
// terminated by `data: [DONE]`) into the Anthropic streaming sequence:
// message_start → content_block_start → content_block_delta* →
// content_block_stop → message_stop.
//
// Fixes over the previous version:
//  - chunks are decoded with a streaming TextDecoder; chunk.toString()
//    corrupted multi-byte UTF-8 characters split across chunk boundaries.
//  - content_block_stop is emitted before message_stop once a text block
//    has been opened, as the Anthropic event protocol requires.
async function streamOpenAIToAnthropic(upstream, res, { model }) {
  setStreamHeaders(res);

  const messageId = `msg_${crypto.randomUUID()}`;
  let started = false;
  let finished = false;

  // Emit the closing events exactly once, whether triggered by an explicit
  // `[DONE]` sentinel or by the upstream simply ending.
  const finish = () => {
    if (finished) return;
    if (started) {
      writeAnthropicEvent(res, 'content_block_stop', {
        type: 'content_block_stop',
        index: 0
      });
    }
    writeAnthropicEvent(res, 'message_stop', { type: 'message_stop' });
    finished = true;
  };

  const parser = createParser((event) => {
    if (event.type !== 'event') return;
    if (!event.data) return;

    if (event.data === '[DONE]') {
      finish();
      return;
    }

    let payload;
    try {
      payload = JSON.parse(event.data);
    } catch {
      // Ignore malformed SSE data lines rather than aborting the stream.
      return;
    }

    const delta = payload?.choices?.[0]?.delta;
    const text = delta?.content;
    if (text == null) return;

    // Lazily open the message and its single text block on first content.
    if (!started) {
      writeAnthropicEvent(res, 'message_start', {
        type: 'message_start',
        message: {
          id: messageId,
          type: 'message',
          role: 'assistant',
          model,
          content: []
        }
      });
      writeAnthropicEvent(res, 'content_block_start', {
        type: 'content_block_start',
        index: 0,
        content_block: { type: 'text', text: '' }
      });
      started = true;
    }

    writeAnthropicEvent(res, 'content_block_delta', {
      type: 'content_block_delta',
      index: 0,
      delta: { type: 'text_delta', text }
    });
  });

  // Stream-aware UTF-8 decoding across chunk boundaries.
  const decoder = new TextDecoder('utf-8');
  for await (const chunk of upstream) {
    parser.feed(typeof chunk === 'string' ? chunk : decoder.decode(chunk, { stream: true }));
  }

  finish();
  res.end();
}
|
|
99
|
+
|
|
100
|
+
// Translate an Anthropic SSE stream (content_block_delta / message_stop
// events) into OpenAI chat.completion.chunk frames terminated by a single
// `data: [DONE]` sentinel.
//
// Fixes over the previous version:
//  - `data: [DONE]` was written twice when the upstream sent message_stop
//    (once in the parser callback, once after the read loop); it is now
//    emitted at most once.
//  - chunks are decoded with a streaming TextDecoder; chunk.toString()
//    corrupted multi-byte UTF-8 characters split across chunk boundaries.
async function streamAnthropicToOpenAI(upstream, res, { model }) {
  setStreamHeaders(res);

  const responseId = `chatcmpl_${crypto.randomUUID()}`;
  let started = false;
  let done = false;

  // Build one chat.completion.chunk frame around the given delta.
  const chunkFor = (delta) => ({
    id: responseId,
    object: 'chat.completion.chunk',
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [{ index: 0, delta, finish_reason: null }]
  });

  // Write the [DONE] sentinel at most once per response.
  const sendDone = () => {
    if (done) return;
    res.write('data: [DONE]\n\n');
    done = true;
  };

  const parser = createParser((event) => {
    if (event.type !== 'event') return;
    if (!event.data) return;

    let payload;
    try {
      payload = JSON.parse(event.data);
    } catch {
      // Ignore malformed SSE data lines rather than aborting the stream.
      return;
    }

    if (payload.type === 'content_block_delta') {
      const text = payload.delta?.text || '';
      if (!started) {
        // OpenAI streams open with a role-only delta chunk.
        writeOpenAIEvent(res, chunkFor({ role: 'assistant' }));
        started = true;
      }
      if (text) {
        writeOpenAIEvent(res, chunkFor({ content: text }));
      }
    }

    if (payload.type === 'message_stop') {
      sendDone();
    }
  });

  // Stream-aware UTF-8 decoding across chunk boundaries.
  const decoder = new TextDecoder('utf-8');
  for await (const chunk of upstream) {
    parser.feed(typeof chunk === 'string' ? chunk : decoder.decode(chunk, { stream: true }));
  }

  sendDone();
  res.end();
}
|
|
152
|
+
|
|
153
|
+
// Streaming helpers shared by the router server: raw pass-through piping
// plus bidirectional Anthropic <-> OpenAI SSE translation.
module.exports = {
  pipeStream,
  streamOpenAIToAnthropic,
  streamAnthropicToOpenAI,
  setStreamHeaders
};
|