@halilertekin/claude-code-router-config 2.0.0 → 2.0.2

package/CHANGELOG.md ADDED
@@ -0,0 +1,14 @@
+ # Changelog
+
+ ## 2.0.2
+ - Simplified the UI, made it responsive, and added Turkish/Dutch language support.
+
+ ## 2.0.1
+ - Include router files in the published npm package.
+
+ ## 2.0.0
+ - Unified router service built into this package (no external router dependency).
+ - Native CLI lifecycle commands (start/stop/restart/status/code/ui).
+ - Streaming translation between Anthropic and OpenAI-style endpoints.
+ - Dashboard now served by the router at `/ui`.
+ - Default config uses `smart-intent-router.js` and adds HOST/PORT.
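For orientation, here is a minimal sketch of the config shape the entries above refer to. The HOST/PORT defaults and the Router/CUSTOM_ROUTER_PATH keys come from the router code later in this diff; the Providers entry, the URL, and the model name are illustrative placeholders rather than values shipped by the package.

// Hypothetical ~/.claude-code-router/config.json contents, shown as a JS object so it can be annotated.
const exampleConfig = {
  HOST: '127.0.0.1',           // defaults applied by the config loader below
  PORT: 3456,
  LOG: true,
  LOG_LEVEL: 'info',
  API_TIMEOUT_MS: 300000,
  CUSTOM_ROUTER_PATH: '$HOME/.claude-code-router/smart-intent-router.js', // $HOME is expanded at load time
  Providers: [                 // array name follows claude-code-router convention; it is not shown in this diff
    {
      name: 'openrouter',
      api_base_url: 'https://openrouter.ai/api/v1/chat/completions',      // illustrative endpoint
      api_key: '$OPENROUTER_API_KEY'                                      // leading "$" means "read from the environment"
    }
  ],
  Router: {
    default: 'openrouter,anthropic/claude-3.5-sonnet',                    // "provider,model" route string
    longContextThreshold: 60000
  }
};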
package/README.md CHANGED
@@ -1,15 +1,16 @@
  # Claude Code Router Config - Advanced Multi-Provider Setup
 
- 🚀 **v2.0.0** - Unified router + config package with z.ai (GLM 4.7) support, advanced CLI tools, analytics, smart routing, and configuration templates!
+ 🚀 **v2.0.2** - Unified router + config package with z.ai (GLM 4.7) support, advanced CLI tools, analytics, smart routing, and configuration templates!
 
  Use Claude Code as a single interface to access multiple AI providers with intelligent routing for optimal performance, cost, and quality.
 
- ## ✨ New in v2.0.0
+ ## ✨ New in v2.0.2
  - **z.ai Support**: Native integration for GLM-4.7 via z.ai (PPInfra).
  - **Lightweight Mode**: New `ccc` function for zero-dependency routing.
  - **Direct GLM Alias**: Type `glm` to launch Claude Code with GLM-4.7 immediately.
  - **Non-interactive install**: CI-friendly installer flags and env controls.
  - **Unified router**: Built-in router service, no external dependency required.
+ - **UI refresh**: Cleaner, more responsive design with Turkish/Dutch (TR/NL) language support.
 
  ## 🚀 Setup on Another Machine (Fastest Way)
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@halilertekin/claude-code-router-config",
-   "version": "2.0.0",
+   "version": "2.0.2",
    "description": "Multi-provider configuration for Claude Code Router with intent-based routing, advanced CLI tools, analytics, and smart routing. Setup OpenAI, Anthropic, Gemini, Qwen, GLM, OpenRouter, and GitHub Copilot with intelligent routing.",
    "main": "install.js",
    "bin": {
@@ -25,6 +25,7 @@
    "bin/",
    "config/",
    "cli/",
+   "router/",
    "logging/",
    "templates/",
    "plugins/",
@@ -35,6 +36,7 @@
    "postinstall.js",
    ".env.example",
    "docs/",
+   "CHANGELOG.md",
    "LICENSE",
    "README.md",
    "NPM_README.md"
package/router/config.js ADDED
@@ -0,0 +1,82 @@
+ const fs = require('fs');
+ const os = require('os');
+ const path = require('path');
+
+ const DEFAULT_CONFIG_DIR = path.join(os.homedir(), '.claude-code-router');
+ const DEFAULT_CONFIG_PATH = path.join(DEFAULT_CONFIG_DIR, 'config.json');
+
+ function resolveEnv(value) {
+   if (typeof value !== 'string') return value;
+
+   const withHome = value
+     .replace(/\$HOME/g, os.homedir())
+     .replace(/\${HOME}/g, os.homedir());
+
+   if (!withHome.includes('$')) return withHome;
+
+   return withHome.replace(/\$([A-Z0-9_]+)/gi, (_, key) => {
+     return process.env[key] ?? '';
+   });
+ }
+
+ function resolveConfigValue(value) {
+   if (Array.isArray(value)) {
+     return value.map(resolveConfigValue);
+   }
+   if (value && typeof value === 'object') {
+     return Object.fromEntries(
+       Object.entries(value).map(([key, val]) => [key, resolveConfigValue(val)])
+     );
+   }
+   return resolveEnv(value);
+ }
+
+ function applyDefaults(config) {
+   const defaults = {
+     HOST: '127.0.0.1',
+     PORT: 3456,
+     LOG: true,
+     LOG_LEVEL: 'info',
+     API_TIMEOUT_MS: 300000
+   };
+
+   return {
+     ...defaults,
+     ...config
+   };
+ }
+
+ function loadConfig() {
+   const configPath = process.env.CCR_CONFIG_PATH || DEFAULT_CONFIG_PATH;
+   if (!fs.existsSync(configPath)) {
+     throw new Error(`Config file not found at ${configPath}`);
+   }
+
+   const raw = JSON.parse(fs.readFileSync(configPath, 'utf8'));
+   const resolved = resolveConfigValue(raw);
+   return applyDefaults(resolved);
+ }
+
+ function getConfigPath() {
+   return process.env.CCR_CONFIG_PATH || DEFAULT_CONFIG_PATH;
+ }
+
+ function getConfigDir() {
+   return process.env.CCR_CONFIG_DIR || DEFAULT_CONFIG_DIR;
+ }
+
+ function resolveProviderKey(provider) {
+   if (!provider?.api_key) return null;
+   if (typeof provider.api_key === 'string' && provider.api_key.startsWith('$')) {
+     const envKey = provider.api_key.slice(1);
+     return process.env[envKey] || null;
+   }
+   return provider.api_key;
+ }
+
+ module.exports = {
+   loadConfig,
+   getConfigPath,
+   getConfigDir,
+   resolveProviderKey
+ };
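A quick usage sketch for this loader. The require path is an assumption (the file name is inferred from the require('./config') call in the provider module further down), and the temporary config file exists only to make the example runnable.

// Hypothetical caller; CCR_CONFIG_PATH and EXAMPLE_KEY values are illustrative.
const fs = require('fs');
const os = require('os');
const path = require('path');

// Write a tiny throwaway config so loadConfig() has something to read.
const tmpPath = path.join(os.tmpdir(), 'ccr-example-config.json');
fs.writeFileSync(tmpPath, JSON.stringify({ PORT: 4000, API_BASE: '$HOME/apis' }));
process.env.CCR_CONFIG_PATH = tmpPath;

const { loadConfig, resolveProviderKey } = require('./router/config'); // assumed path

const config = loadConfig();
console.log(config.HOST, config.PORT); // '127.0.0.1' 4000  (defaults merged, values from the file win)
console.log(config.API_BASE);          // e.g. '/home/me/apis'  ($HOME expanded by resolveEnv)

// api_key values written as "$NAME" are read from the environment.
process.env.EXAMPLE_KEY = 'sk-demo';
console.log(resolveProviderKey({ api_key: '$EXAMPLE_KEY' })); // 'sk-demo'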
@@ -0,0 +1,209 @@
+ const crypto = require('crypto');
+
+ function toText(value) {
+   if (typeof value === 'string') return value;
+   if (value == null) return '';
+   return JSON.stringify(value);
+ }
+
+ function normalizeAnthropicSystem(system) {
+   if (!system) return '';
+   if (typeof system === 'string') return system;
+   if (Array.isArray(system)) {
+     return system
+       .map((item) => {
+         if (typeof item === 'string') return item;
+         if (item?.type === 'text') return item.text || '';
+         return toText(item);
+       })
+       .join('\n');
+   }
+   return toText(system);
+ }
+
+ function anthropicMessagesToOpenAI(messages = []) {
+   return messages.map((message) => {
+     if (typeof message?.content === 'string') {
+       return { role: message.role, content: message.content };
+     }
+     if (Array.isArray(message?.content)) {
+       const text = message.content
+         .map((part) => {
+           if (part?.type === 'text') return part.text || '';
+           return toText(part);
+         })
+         .join('');
+       return { role: message.role, content: text };
+     }
+     return { role: message.role, content: toText(message?.content) };
+   });
+ }
+
+ function openAIMessagesToAnthropic(messages = []) {
+   return messages
+     .filter((message) => message.role !== 'system')
+     .map((message) => {
+       if (typeof message.content === 'string') {
+         return { role: message.role, content: message.content };
+       }
+       if (Array.isArray(message.content)) {
+         const parts = message.content
+           .map((part) => {
+             if (part?.type === 'text') {
+               return { type: 'text', text: part.text || '' };
+             }
+             return { type: 'text', text: toText(part) };
+           });
+         return { role: message.role, content: parts };
+       }
+       return { role: message.role, content: toText(message.content) };
+     });
+ }
+
+ function openAIToolsToAnthropic(tools = []) {
+   return tools
+     .map((tool) => {
+       if (tool?.type === 'function' && tool.function) {
+         return {
+           name: tool.function.name,
+           description: tool.function.description || '',
+           input_schema: tool.function.parameters || {}
+         };
+       }
+       return null;
+     })
+     .filter(Boolean);
+ }
+
+ function anthropicToolsToOpenAI(tools = []) {
+   return tools
+     .map((tool) => {
+       if (!tool?.name) return null;
+       return {
+         type: 'function',
+         function: {
+           name: tool.name,
+           description: tool.description || '',
+           parameters: tool.input_schema || {}
+         }
+       };
+     })
+     .filter(Boolean);
+ }
+
+ function anthropicToOpenAI(body) {
+   const system = normalizeAnthropicSystem(body.system);
+   const messages = anthropicMessagesToOpenAI(body.messages || []);
+   const openAIMessages = system
+     ? [{ role: 'system', content: system }, ...messages]
+     : messages;
+
+   const openai = {
+     model: body.model,
+     messages: openAIMessages,
+     stream: Boolean(body.stream),
+     temperature: body.temperature,
+     max_tokens: body.max_tokens
+   };
+
+   if (body.top_p != null) openai.top_p = body.top_p;
+   if (body.tools) openai.tools = anthropicToolsToOpenAI(body.tools);
+   if (body.tool_choice) openai.tool_choice = body.tool_choice;
+   if (body.metadata) openai.metadata = body.metadata;
+
+   return openai;
+ }
+
+ function openAIToAnthropic(body) {
+   const systemMessages = (body.messages || []).filter((m) => m.role === 'system');
+   const system = systemMessages.map((m) => toText(m.content)).join('\n');
+   const messages = openAIMessagesToAnthropic(body.messages || []);
+
+   const anthropic = {
+     model: body.model,
+     messages,
+     system: system || undefined,
+     stream: Boolean(body.stream),
+     max_tokens: body.max_tokens,
+     temperature: body.temperature
+   };
+
+   if (body.top_p != null) anthropic.top_p = body.top_p;
+   if (body.tools) anthropic.tools = openAIToolsToAnthropic(body.tools);
+   if (body.tool_choice) anthropic.tool_choice = body.tool_choice;
+   if (body.metadata) anthropic.metadata = body.metadata;
+
+   return anthropic;
+ }
+
+ function extractOpenAIText(response) {
+   const choice = response?.choices?.[0];
+   if (!choice) return '';
+   if (choice.message?.content) return choice.message.content;
+   if (choice.delta?.content) return choice.delta.content;
+   return '';
+ }
+
+ function extractAnthropicText(response) {
+   if (!Array.isArray(response?.content)) return toText(response?.content);
+   return response.content
+     .map((part) => (part?.type === 'text' ? part.text || '' : toText(part)))
+     .join('');
+ }
+
+ function openAIResponseToAnthropic(response, fallbackModel) {
+   const text = extractOpenAIText(response);
+   const id = response?.id || `msg_${crypto.randomUUID()}`;
+   const model = response?.model || fallbackModel;
+   const finish = response?.choices?.[0]?.finish_reason || 'stop';
+
+   return {
+     id,
+     type: 'message',
+     role: 'assistant',
+     model,
+     content: [{ type: 'text', text }],
+     stop_reason: finish,
+     usage: {
+       input_tokens: response?.usage?.prompt_tokens || 0,
+       output_tokens: response?.usage?.completion_tokens || 0
+     }
+   };
+ }
+
+ function anthropicResponseToOpenAI(response, fallbackModel) {
+   const text = extractAnthropicText(response);
+   const model = response?.model || fallbackModel;
+   const created = Math.floor(Date.now() / 1000);
+   const finish = response?.stop_reason || 'stop';
+   const promptTokens = response?.usage?.input_tokens || 0;
+   const completionTokens = response?.usage?.output_tokens || 0;
+
+   return {
+     id: response?.id || `chatcmpl_${crypto.randomUUID()}`,
+     object: 'chat.completion',
+     created,
+     model,
+     choices: [
+       {
+         index: 0,
+         message: { role: 'assistant', content: text },
+         finish_reason: finish
+       }
+     ],
+     usage: {
+       prompt_tokens: promptTokens,
+       completion_tokens: completionTokens,
+       total_tokens: promptTokens + completionTokens
+     }
+   };
+ }
+
+ module.exports = {
+   anthropicToOpenAI,
+   openAIToAnthropic,
+   openAIResponseToAnthropic,
+   anthropicResponseToOpenAI,
+   anthropicToolsToOpenAI,
+   openAIToolsToAnthropic
+ };
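A small worked example of the translation layer above. The require path is an assumption (this hunk does not show the file's name), and the request values are placeholders.

const converters = require('./router/converters'); // assumed path

// Anthropic-style request, the shape Claude Code sends to /v1/messages.
const anthropicRequest = {
  model: 'glm-4.7',
  system: [{ type: 'text', text: 'You are terse.' }],
  messages: [{ role: 'user', content: [{ type: 'text', text: 'Say hi.' }] }],
  max_tokens: 64,
  stream: false
};

const openaiRequest = converters.anthropicToOpenAI(anthropicRequest);
// => { model: 'glm-4.7',
//      messages: [ { role: 'system', content: 'You are terse.' },
//                  { role: 'user', content: 'Say hi.' } ],
//      stream: false, temperature: undefined, max_tokens: 64 }

// Response direction: an OpenAI chat completion mapped back to an Anthropic message.
const anthropicResponse = converters.openAIResponseToAnthropic({
  id: 'chatcmpl-123',
  model: 'glm-4.7',
  choices: [{ message: { role: 'assistant', content: 'hi' }, finish_reason: 'stop' }],
  usage: { prompt_tokens: 12, completion_tokens: 1 }
}, 'glm-4.7');
// => { id: 'chatcmpl-123', type: 'message', role: 'assistant', model: 'glm-4.7',
//      content: [{ type: 'text', text: 'hi' }], stop_reason: 'stop',
//      usage: { input_tokens: 12, output_tokens: 1 } }

Note that finish_reason/stop_reason values are passed through as-is rather than remapped between the two vocabularies.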
package/router/http.js ADDED
@@ -0,0 +1,55 @@
+ const { request } = require('undici');
+
+ async function readStream(stream) {
+   const chunks = [];
+   for await (const chunk of stream) {
+     chunks.push(Buffer.from(chunk));
+   }
+   return Buffer.concat(chunks).toString('utf8');
+ }
+
+ async function sendJsonRequest({ url, headers, body, timeoutMs }) {
+   const controller = new AbortController();
+   const timeout = setTimeout(() => controller.abort(), timeoutMs);
+
+   try {
+     const response = await request(url, {
+       method: 'POST',
+       headers,
+       body: JSON.stringify(body),
+       signal: controller.signal
+     });
+
+     const rawBody = await readStream(response.body);
+     return {
+       statusCode: response.statusCode,
+       headers: response.headers,
+       rawBody
+     };
+   } finally {
+     clearTimeout(timeout);
+   }
+ }
+
+ async function sendStreamRequest({ url, headers, body, timeoutMs }) {
+   const controller = new AbortController();
+   const timeout = setTimeout(() => controller.abort(), timeoutMs);
+
+   const response = await request(url, {
+     method: 'POST',
+     headers,
+     body: JSON.stringify(body),
+     signal: controller.signal
+   });
+
+   response.body.on('end', () => clearTimeout(timeout));
+   response.body.on('error', () => clearTimeout(timeout));
+
+   return response;
+ }
+
+ module.exports = {
+   sendJsonRequest,
+   sendStreamRequest,
+   readStream
+ };
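Usage sketch for the HTTP helper above. This assumes undici is installed and uses httpbin.org purely as a demo endpoint; the requirement path is an assumption. The timeout works by aborting the undici request through the AbortController signal.

const { sendJsonRequest } = require('./router/http'); // assumed path

async function demo() {
  const { statusCode, rawBody } = await sendJsonRequest({
    url: 'https://httpbin.org/post',                     // demo endpoint, not part of the package
    headers: { 'content-type': 'application/json' },
    body: { hello: 'router' },
    timeoutMs: 10000
  });
  console.log(statusCode);                 // 200 if the endpoint is reachable
  console.log(JSON.parse(rawBody).json);   // { hello: 'router' } echoed back by httpbin
}

demo().catch(console.error);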
@@ -0,0 +1,53 @@
+ const { resolveProviderKey } = require('./config');
+
+ function inferProviderFormat(provider) {
+   const name = (provider?.name || '').toLowerCase();
+   const baseUrl = provider?.api_base_url || '';
+   const transformers = provider?.transformer?.use || [];
+
+   if (name === 'anthropic' || name === 'glm') return 'anthropic';
+   if (baseUrl.includes('/v1/messages') || baseUrl.includes('/anthropic')) return 'anthropic';
+   if (transformers.some((t) => t.toLowerCase() === 'anthropic')) return 'anthropic';
+
+   return 'openai';
+ }
+
+ function resolveAuthHeader(provider, apiKey) {
+   if (!apiKey) return {};
+
+   const name = (provider?.name || '').toLowerCase();
+   const headerName = provider.api_key_header
+     || (name === 'gemini' ? 'x-goog-api-key' : 'authorization');
+   const headerValue = headerName.toLowerCase() === 'authorization'
+     ? `Bearer ${apiKey}`
+     : apiKey;
+
+   return { [headerName]: headerValue };
+ }
+
+ function buildProviderHeaders(provider) {
+   const apiKey = resolveProviderKey(provider);
+   const headers = {
+     'content-type': 'application/json',
+     ...resolveAuthHeader(provider, apiKey),
+     ...(provider.headers || {})
+   };
+
+   if (inferProviderFormat(provider) === 'anthropic') {
+     headers['anthropic-version'] = headers['anthropic-version'] || '2023-06-01';
+   }
+
+   if ((provider?.name || '').toLowerCase() === 'openrouter') {
+     const referer = provider.referer || process.env.OPENROUTER_REFERRER;
+     const title = provider.app_name || process.env.OPENROUTER_APP_NAME;
+     if (referer) headers['http-referer'] = referer;
+     if (title) headers['x-title'] = title;
+   }
+
+   return { headers, apiKey };
+ }
+
+ module.exports = {
+   inferProviderFormat,
+   buildProviderHeaders
+ };
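A worked example of the header logic above. The require path, the base URL, and the key value are placeholders; only the field names come from the code in this hunk.

const { buildProviderHeaders, inferProviderFormat } = require('./router/provider'); // assumed path

process.env.ZAI_API_KEY = 'sk-demo'; // placeholder; real keys come from the user's environment

const glmProvider = {
  name: 'glm',
  api_base_url: 'https://example.invalid/anthropic/v1/messages', // illustrative URL only
  api_key: '$ZAI_API_KEY'
};

console.log(inferProviderFormat(glmProvider)); // 'anthropic' (matched by the provider name)

const { headers } = buildProviderHeaders(glmProvider);
// headers => {
//   'content-type': 'application/json',
//   authorization: 'Bearer sk-demo',
//   'anthropic-version': '2023-06-01'
// }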
@@ -0,0 +1,93 @@
+ const fs = require('fs');
+
+ function estimateTokens(messages = [], system) {
+   const systemText = Array.isArray(system)
+     ? system.map((s) => (typeof s === 'string' ? s : s?.text || '')).join(' ')
+     : (system || '');
+   const messageText = (messages || [])
+     .map((m) => (typeof m.content === 'string' ? m.content : JSON.stringify(m.content || '')))
+     .join(' ');
+   const text = `${systemText} ${messageText}`;
+   return Math.max(1, Math.ceil(text.length / 4));
+ }
+
+ function parseExplicitRoute(model) {
+   if (!model || !model.includes(',')) return null;
+   const [provider, ...rest] = model.split(',');
+   const modelName = rest.join(',');
+   return provider && modelName ? { provider, model: modelName } : null;
+ }
+
+ function extractSubagentModel(system) {
+   const systemText = Array.isArray(system)
+     ? system.map((item) => (item?.text ? item.text : '')).join(' ')
+     : (system || '');
+   const match = systemText.match(/<CCR-SUBAGENT-MODEL>(.*?)<\/CCR-SUBAGENT-MODEL>/s);
+   return match ? match[1].trim() : null;
+ }
+
+ function loadCustomRouter(routerPath) {
+   try {
+     if (!routerPath) return null;
+     if (!fs.existsSync(routerPath)) return null;
+     delete require.cache[require.resolve(routerPath)];
+     return require(routerPath);
+   } catch {
+     return null;
+   }
+ }
+
+ async function resolveRoute(req, config) {
+   if (!req?.body) return config.Router?.default || null;
+
+   const explicit = parseExplicitRoute(req.body.model);
+   if (explicit) {
+     return `${explicit.provider},${explicit.model}`;
+   }
+
+   const tokenCount = estimateTokens(req.body.messages || [], req.body.system);
+   req.tokenCount = tokenCount;
+
+   const subagentModel = extractSubagentModel(req.body.system);
+   if (subagentModel) {
+     return subagentModel;
+   }
+
+   const customRouter = loadCustomRouter(config.CUSTOM_ROUTER_PATH);
+   if (customRouter) {
+     try {
+       const route = await customRouter(req, config);
+       if (route) return route;
+     } catch {
+       // Fall through to defaults
+     }
+   }
+
+   const routerConfig = config.Router || {};
+   const longContextThreshold = routerConfig.longContextThreshold || 60000;
+   if (tokenCount > longContextThreshold && routerConfig.longContext) {
+     return routerConfig.longContext;
+   }
+
+   if (req.body.thinking && routerConfig.think) {
+     return routerConfig.think;
+   }
+
+   if (Array.isArray(req.body.tools) && req.body.tools.some((tool) => {
+     return typeof tool?.type === 'string' && tool.type.startsWith('web_search');
+   }) && routerConfig.webSearch) {
+     return routerConfig.webSearch;
+   }
+
+   if (req.body.model?.includes('haiku') && routerConfig.background) {
+     return routerConfig.background;
+   }
+
+   return routerConfig.default || null;
+ }
+
+ module.exports = {
+   resolveRoute,
+   parseExplicitRoute,
+   estimateTokens
+ };
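A routing sketch tying the pieces together. The require path, route strings, and model names are placeholders; the custom-router contract is inferred from loadCustomRouter/resolveRoute above.

const { resolveRoute, parseExplicitRoute } = require('./router/router'); // assumed path

const config = {
  Router: {
    default: 'openrouter,anthropic/claude-3.5-sonnet',
    background: 'glm,glm-4.7',
    longContext: 'gemini,gemini-1.5-pro',
    longContextThreshold: 60000
  }
};

// An explicit "provider,model" request bypasses every other rule.
console.log(parseExplicitRoute('glm,glm-4.7')); // { provider: 'glm', model: 'glm-4.7' }

(async () => {
  // Haiku-style model names fall through to the background route.
  const req = { body: { model: 'claude-3-5-haiku', messages: [{ role: 'user', content: 'ping' }] } };
  console.log(await resolveRoute(req, config)); // 'glm,glm-4.7'
})();

// A CUSTOM_ROUTER_PATH module is expected to export an async (req, config) => routeString
// function; returning a falsy value falls back to the Router rules above.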