claude-code-router-config 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +190 -18
- package/cli/analytics.js +509 -0
- package/cli/benchmark.js +342 -0
- package/cli/commands.js +300 -0
- package/config/smart-intent-router.js +543 -0
- package/docs/AGENTSKILLS_INTEGRATION.md +500 -0
- package/docs/AGENTSKILLS_SETUP.md +743 -0
- package/docs/AGENTSKILLS_SETUP_TR.md +736 -0
- package/docs/FULL_DOCUMENTATION.md +23 -2
- package/docs/FULL_DOCUMENTATION_EN.md +23 -2
- package/docs/HOMEBREW_SETUP.md +252 -0
- package/docs/v1.1.0-FEATURES.md +752 -0
- package/logging/enhanced-logger.js +410 -0
- package/logging/health-monitor.js +472 -0
- package/logging/middleware.js +384 -0
- package/package.json +42 -10
- package/plugins/plugin-manager.js +607 -0
- package/templates/README.md +161 -0
- package/templates/balanced.json +111 -0
- package/templates/cost-optimized.json +96 -0
- package/templates/development.json +104 -0
- package/templates/performance-optimized.json +88 -0
- package/templates/quality-focused.json +105 -0
- package/web-dashboard/public/css/dashboard.css +575 -0
- package/web-dashboard/public/index.html +308 -0
- package/web-dashboard/public/js/dashboard.js +512 -0
- package/web-dashboard/server.js +352 -0
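Both new CLI entry points shown below (package/cli/benchmark.js and package/cli/commands.js) read their settings from ~/.claude-code-router/config.json. For reference, here is a minimal sketch of the shape they expect; the field names come from the added code, while the provider values are placeholders rather than values taken from this diff:

// Minimal config sketch (placeholder values; field names taken from the code below).
const exampleConfig = {
  LOG: true,
  Providers: [
    {
      name: "openai",                              // matched by testProvider() / runBenchmark()
      api_base_url: "https://example.invalid/v1",  // placeholder endpoint
      api_key: "$OPENAI_API_KEY",                  // resolved via process.env after stripping "$"
      models: ["gpt-4o"]                           // benchmark.js tests models[0] by default
    }
  ],
  Router: {
    default: "openai,gpt-4o"                       // printed key/value by `ccr status`
  }
};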
package/cli/benchmark.js
ADDED
@@ -0,0 +1,342 @@

#!/usr/bin/env node

const { spawn } = require('child_process');
const fs = require('fs');
const path = require('path');
const chalk = require('chalk');

// Load provider configurations
function loadProviders() {
  const configPath = path.join(require('os').homedir(), '.claude-code-router', 'config.json');
  try {
    const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
    return config.Providers;
  } catch (error) {
    console.error(chalk.red('❌ Failed to load provider configuration'));
    return [];
  }
}

// Perform speed test for a provider
async function speedTest(providerName, modelName) {
  return new Promise((resolve) => {
    const testPrompt = "Hello, can you respond with just 'OK'?";
    const startTime = Date.now();

    // Use ccr to send a test request
    const child = spawn('ccr', ['code', '--test'], {
      stdio: 'pipe',
      env: {
        ...process.env,
        CCR_TEST_PROVIDER: providerName,
        CCR_TEST_MODEL: modelName,
        CCR_TEST_PROMPT: testPrompt
      }
    });

    let output = '';
    child.stdout.on('data', (data) => {
      output += data.toString();
    });

    child.on('close', (code) => {
      const endTime = Date.now();
      const latency = endTime - startTime;
      const success = code === 0 && output.includes('OK');

      resolve({
        provider: providerName,
        model: modelName,
        latency,
        success,
        timestamp: new Date().toISOString()
      });
    });

    // Timeout after 30 seconds
    setTimeout(() => {
      child.kill();
      resolve({
        provider: providerName,
        model: modelName,
        latency: 30000,
        success: false,
        error: 'Timeout',
        timestamp: new Date().toISOString()
      });
    }, 30000);
  });
}

// Run comprehensive benchmark
async function runBenchmark(options = {}) {
  const {
    iterations = 3,
    warmup = true,
    output = 'console',
    providers = [],
    models = []
  } = options;

  const providersList = loadProviders();
  const targetProviders = providers.length > 0
    ? providersList.filter(p => providers.includes(p.name))
    : providersList;

  console.log(chalk.blue('🏃‍♂️ Claude Code Router Benchmark'));
  console.log(chalk.gray(`Running ${iterations} iterations per provider/model`));
  console.log(chalk.gray('─'.repeat(60)));

  const results = [];

  for (const provider of targetProviders) {
    const modelsToTest = models.length > 0
      ? provider.models.filter(m => models.includes(m))
      : [provider.models[0]]; // Test primary model only by default

    for (const model of modelsToTest) {
      console.log(chalk.yellow(`\n🔍 Testing ${provider.name} - ${model}`));

      // Warmup
      if (warmup) {
        await speedTest(provider.name, model);
        console.log(chalk.gray('  Warmup completed'));
      }

      // Main benchmark
      const runs = [];
      for (let i = 0; i < iterations; i++) {
        process.stdout.write(chalk.gray(`  Run ${i + 1}/${iterations}... `));
        const result = await speedTest(provider.name, model);

        if (result.success) {
          console.log(chalk.green(`${result.latency}ms`));
          runs.push(result.latency);
        } else {
          console.log(chalk.red('Failed'));
        }
      }

      // Calculate statistics
      if (runs.length > 0) {
        const avgLatency = Math.round(runs.reduce((a, b) => a + b, 0) / runs.length);
        const minLatency = Math.min(...runs);
        const maxLatency = Math.max(...runs);
        const successRate = (runs.length / iterations) * 100;

        const stats = {
          provider: provider.name,
          model,
          avgLatency,
          minLatency,
          maxLatency,
          successRate,
          iterations,
          timestamp: new Date().toISOString()
        };

        results.push(stats);

        // Display results
        console.log(chalk.green(`  ✅ Average: ${avgLatency}ms`));
        console.log(chalk.blue(`  Range: ${minLatency}ms - ${maxLatency}ms`));
        console.log(chalk.blue(`  Success Rate: ${successRate}%`));
      } else {
        console.log(chalk.red(`  ❌ All runs failed`));
      }
    }
  }

  // Summary
  console.log(chalk.blue('\n📊 Benchmark Summary'));
  console.log(chalk.gray('─'.repeat(60)));

  if (results.length > 0) {
    // Sort by average latency
    results.sort((a, b) => a.avgLatency - b.avgLatency);

    results.forEach((result, index) => {
      const medal = index === 0 ? '🥇' : index === 1 ? '🥈' : index === 2 ? '🥉' : ' ';
      console.log(`${medal} ${result.provider}/${result.model}: ${result.avgLatency}ms (${result.successRate}% success)`);
    });

    // Performance classification
    const fastest = results[0];
    const slowest = results[results.length - 1];
    const speedRatio = (slowest.avgLatency / fastest.avgLatency).toFixed(1);

    console.log(chalk.yellow(`\n💡 Performance Insights:`));
    console.log(`  Fastest: ${fastest.provider}/${fastest.model} (${fastest.avgLatency}ms)`);
    console.log(`  Slowest: ${slowest.provider}/${slowest.model} (${slowest.avgLatency}ms)`);
    console.log(`  Speed Ratio: ${speedRatio}x`);
  } else {
    console.log(chalk.red('No successful tests completed'));
  }

  // Export results
  if (output === 'json' || output === 'file') {
    const reportPath = path.join(process.cwd(), `benchmark-${Date.now()}.json`);
    fs.writeFileSync(reportPath, JSON.stringify(results, null, 2));
    console.log(chalk.blue(`\n💾 Results saved to: ${reportPath}`));
  }

  return results;
}

// Load test (stress test)
async function loadTest(provider, model, options = {}) {
  const { concurrent = 5, duration = 30 } = options;
  const startTime = Date.now();
  const endTime = startTime + (duration * 1000);

  console.log(chalk.blue(`🔥 Load Testing ${provider}/${model}`));
  console.log(chalk.gray(`Concurrent requests: ${concurrent}, Duration: ${duration}s`));

  const results = [];
  const promises = [];

  // Run concurrent requests
  for (let i = 0; i < concurrent; i++) {
    promises.push((async () => {
      let requestCount = 0;
      let totalLatency = 0;
      let errors = 0;

      while (Date.now() < endTime) {
        const requestStart = Date.now();
        const result = await speedTest(provider, model);
        const requestEnd = Date.now();

        requestCount++;
        totalLatency += (requestEnd - requestStart);

        if (!result.success) {
          errors++;
        }

        // Small delay between requests
        await new Promise(resolve => setTimeout(resolve, 100));
      }

      return {
        thread: i,
        requests: requestCount,
        avgLatency: Math.round(totalLatency / requestCount),
        errors,
        errorRate: (errors / requestCount) * 100
      };
    })());
  }

  const threadResults = await Promise.all(promises);

  // Aggregate results
  const totalRequests = threadResults.reduce((sum, r) => sum + r.requests, 0);
  const totalErrors = threadResults.reduce((sum, r) => sum + r.errors, 0);
  const avgLatency = Math.round(
    threadResults.reduce((sum, r) => sum + r.avgLatency * r.requests, 0) / totalRequests
  );
  const requestsPerSecond = Math.round(totalRequests / duration);
  const errorRate = (totalErrors / totalRequests) * 100;

  console.log(chalk.green('\n📊 Load Test Results:'));
  console.log(`  Total Requests: ${totalRequests}`);
  console.log(`  Requests/Second: ${requestsPerSecond}`);
  console.log(`  Average Latency: ${avgLatency}ms`);
  console.log(`  Error Rate: ${errorRate}%`);

  return {
    provider,
    model,
    duration,
    concurrent,
    totalRequests,
    requestsPerSecond,
    avgLatency,
    errorRate,
    threadResults
  };
}

// CLI interface
async function main() {
  const args = process.argv.slice(2);
  const command = args[0];

  switch (command) {
    case 'speed':
      const provider = args[1];
      const model = args[2];
      if (!provider) {
        console.error(chalk.red('Usage: ccr benchmark speed <provider> [model]'));
        process.exit(1);
      }
      await speedTest(provider, model);
      break;

    case 'full':
      const iterations = parseInt(args[1]) || 3;
      const providers = args.filter(arg => arg.startsWith('--provider=')).map(arg => arg.split('=')[1]);
      const models = args.filter(arg => arg.startsWith('--model=')).map(arg => arg.split('=')[1]);
      const output = args.includes('--json') ? 'json' : 'console';
      const warmup = !args.includes('--no-warmup');

      await runBenchmark({
        iterations,
        providers,
        models,
        output,
        warmup
      });
      break;

    case 'load':
      const loadProvider = args[1];
      const loadModel = args[2];
      const concurrent = parseInt(args.find(arg => arg.startsWith('--concurrent='))?.split('=')[1]) || 5;
      const duration = parseInt(args.find(arg => arg.startsWith('--duration='))?.split('=')[1]) || 30;

      if (!loadProvider) {
        console.error(chalk.red('Usage: ccr benchmark load <provider> <model> [--concurrent=N] [--duration=N]'));
        process.exit(1);
      }

      await loadTest(loadProvider, loadModel, { concurrent, duration });
      break;

    default:
      console.log(chalk.blue('Claude Code Router - Benchmark CLI'));
      console.log(chalk.gray('─'.repeat(45)));
      console.log(chalk.yellow('Available commands:'));
      console.log('');
      console.log('Speed Testing:');
      console.log('  ccr benchmark speed <provider> [model]      - Single speed test');
      console.log('  ccr benchmark full [iterations] [options]   - Full benchmark');
      console.log('');
      console.log('Load Testing:');
      console.log('  ccr benchmark load <provider> <model> [options] - Stress test');
      console.log('');
      console.log('Options:');
      console.log('  --provider=<name>    Test specific provider');
      console.log('  --model=<name>       Test specific model');
      console.log('  --json               Output results as JSON');
      console.log('  --no-warmup          Skip warmup requests');
      console.log('  --concurrent=N       Number of concurrent requests (load test)');
      console.log('  --duration=N         Test duration in seconds (load test)');
      console.log('');
      console.log('Examples:');
      console.log('  ccr benchmark speed openai gpt-4o');
      console.log('  ccr benchmark full 5 --provider=openai --provider=anthropic');
      console.log('  ccr benchmark load openai gpt-4o --concurrent=10 --duration=60');
  }
}

if (require.main === module) {
  main().catch(console.error);
}

module.exports = {
  runBenchmark,
  loadTest,
  speedTest
};
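Beyond the `ccr benchmark ...` subcommands, the file exports runBenchmark, loadTest, and speedTest, so the same checks can be driven programmatically. A minimal usage sketch follows; the require path is an assumption about how the package is installed, and the option names come from the code above:

// Hypothetical programmatic use of the new benchmark module.
const { runBenchmark, loadTest } = require('claude-code-router-config/cli/benchmark');

(async () => {
  // Five iterations against one provider/model; output: 'json' also writes benchmark-<timestamp>.json to cwd.
  const results = await runBenchmark({
    iterations: 5,
    providers: ['openai'],   // names as they appear in config.Providers
    models: ['gpt-4o'],
    output: 'json',
    warmup: true
  });
  console.log(`Benchmarked ${results.length} provider/model combinations`);

  // Ten-second stress test with three concurrent workers.
  await loadTest('openai', 'gpt-4o', { concurrent: 3, duration: 10 });
})();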
package/cli/commands.js
ADDED
@@ -0,0 +1,300 @@

#!/usr/bin/env node

const fs = require('fs');
const path = require('path');
const { spawn } = require('child_process');
const chalk = require('chalk');
const configPath = path.join(require('os').homedir(), '.claude-code-router');

// Load config
function loadConfig() {
  try {
    return JSON.parse(fs.readFileSync(path.join(configPath, 'config.json'), 'utf8'));
  } catch (error) {
    console.error(chalk.red('❌ Configuration not found. Run installation first.'));
    process.exit(1);
  }
}

// Test provider connectivity
async function testProvider(provider, model) {
  const config = loadConfig();
  const providerConfig = config.Providers.find(p => p.name === provider);

  if (!providerConfig) {
    console.error(chalk.red(`❌ Provider "${provider}" not found in config`));
    return false;
  }

  console.log(chalk.blue(`🔍 Testing ${provider} with model: ${model || 'default'}`));

  try {
    const startTime = Date.now();
    // Simple test request
    const testRequest = {
      model: model || providerConfig.models[0],
      messages: [{ role: "user", content: "Test connection" }],
      max_tokens: 10
    };

    // For now, just check if API key is set
    const apiKey = process.env[providerConfig.api_key.replace('$', '')];
    if (!apiKey) {
      throw new Error(`API key not set for ${provider}`);
    }

    const endTime = Date.now();
    console.log(chalk.green(`✅ ${provider}: Connected (${endTime - startTime}ms)`));
    return true;
  } catch (error) {
    console.error(chalk.red(`❌ ${provider}: ${error.message}`));
    return false;
  }
}

// Benchmark all providers
async function benchmarkProviders(options = {}) {
  const config = loadConfig();
  const { allProviders = false, compareSpeed = false } = options;

  console.log(chalk.blue('🏃‍♂️ Provider Benchmark'));
  console.log(chalk.gray('─'.repeat(50)));

  const results = [];

  for (const provider of config.Providers) {
    if (allProviders || provider.name.includes('openai') || provider.name.includes('anthropic')) {
      const startTime = Date.now();
      const success = await testProvider(provider.name);
      const endTime = Date.now();

      if (success) {
        results.push({
          provider: provider.name,
          latency: endTime - startTime,
          status: 'healthy'
        });
      } else {
        results.push({
          provider: provider.name,
          latency: null,
          status: 'failed'
        });
      }
    }
  }

  if (compareSpeed && results.length > 1) {
    console.log(chalk.blue('\n📊 Speed Comparison'));
    const healthyResults = results.filter(r => r.status === 'healthy');
    healthyResults.sort((a, b) => a.latency - b.latency);

    healthyResults.forEach((result, index) => {
      const medal = index === 0 ? '🥇' : index === 1 ? '🥈' : index === 2 ? '🥉' : ' ';
      console.log(`${medal} ${result.provider}: ${result.latency}ms`);
    });
  }

  return results;
}

// Show detailed status
async function showDetailedStatus(options = {}) {
  const config = loadConfig();
  const { showCosts = false } = options;

  console.log(chalk.blue('📊 Claude Code Router Status'));
  console.log(chalk.gray('─'.repeat(50)));

  // Configuration info
  console.log(chalk.yellow('Configuration:'));
  console.log(`  Providers: ${config.Providers.length}`);
  console.log(`  Logging: ${config.LOG ? 'Enabled' : 'Disabled'}`);
  console.log(`  Custom Router: ${config.CUSTOM_ROUTER_PATH ? 'Enabled' : 'Disabled'}`);

  // Provider status
  console.log(chalk.yellow('\nProviders:'));
  for (const provider of config.Providers) {
    const apiKey = process.env[provider.api_key.replace('$', '')];
    const status = apiKey ? '🟢 Active' : '🔴 Missing API Key';
    console.log(`  ${provider.name}: ${status}`);
  }

  // Router configuration
  console.log(chalk.yellow('\nRouter Configuration:'));
  Object.entries(config.Router).forEach(([key, value]) => {
    console.log(`  ${key}: ${value}`);
  });

  if (showCosts) {
    console.log(chalk.yellow('\n💰 Cost Information:'));
    console.log('  Note: Cost tracking requires analytics module');
    console.log('  Run: ccr analytics --today');
  }
}

// Validate configuration
function validateConfig() {
  const configPathFull = path.join(configPath, 'config.json');

  if (!fs.existsSync(configPathFull)) {
    console.error(chalk.red('❌ Config file not found'));
    return false;
  }

  try {
    const config = JSON.parse(fs.readFileSync(configPathFull, 'utf8'));

    console.log(chalk.blue('🔍 Validating Configuration'));
    console.log(chalk.gray('─'.repeat(50)));

    // Check required fields
    const requiredFields = ['Providers', 'Router'];
    let valid = true;

    for (const field of requiredFields) {
      if (!config[field]) {
        console.error(chalk.red(`❌ Missing required field: ${field}`));
        valid = false;
      } else {
        console.log(chalk.green(`✅ ${field}: Present`));
      }
    }

    // Check providers
    if (config.Providers) {
      console.log(chalk.yellow('\nProvider Validation:'));
      config.Providers.forEach((provider, index) => {
        const required = ['name', 'api_base_url', 'api_key', 'models'];
        const missing = required.filter(field => !provider[field]);

        if (missing.length === 0) {
          console.log(chalk.green(`  ✅ Provider ${index + 1}: ${provider.name}`));
        } else {
          console.log(chalk.red(`  ❌ Provider ${index + 1}: Missing ${missing.join(', ')}`));
          valid = false;
        }
      });
    }

    if (valid) {
      console.log(chalk.green('\n✅ Configuration is valid!'));
    } else {
      console.log(chalk.red('\n❌ Configuration has errors'));
    }

    return valid;
  } catch (error) {
    console.error(chalk.red(`❌ Error parsing config: ${error.message}`));
    return false;
  }
}

// Backup configuration
function backupConfig() {
  const configPathFull = path.join(configPath, 'config.json');
  const routerPathFull = path.join(configPath, 'intent-router.js');
  const backupDir = path.join(configPath, 'backups');

  if (!fs.existsSync(backupDir)) {
    fs.mkdirSync(backupDir, { recursive: true });
  }

  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');

  try {
    if (fs.existsSync(configPathFull)) {
      const backupConfig = path.join(backupDir, `config-${timestamp}.json`);
      fs.copyFileSync(configPathFull, backupConfig);
      console.log(chalk.green(`✅ Config backed up: ${backupConfig}`));
    }

    if (fs.existsSync(routerPathFull)) {
      const backupRouter = path.join(backupDir, `intent-router-${timestamp}.js`);
      fs.copyFileSync(routerPathFull, backupRouter);
      console.log(chalk.green(`✅ Router backed up: ${backupRouter}`));
    }
  } catch (error) {
    console.error(chalk.red(`❌ Backup failed: ${error.message}`));
  }
}

// CLI command handler
async function main() {
  const [command, ...args] = process.argv.slice(2);

  switch (command) {
    case 'test':
      const provider = args[0];
      const model = args[1];
      if (provider) {
        await testProvider(provider, model);
      } else {
        console.error(chalk.red('❌ Please specify a provider: ccr test <provider> [model]'));
      }
      break;

    case 'benchmark':
      const options = {
        allProviders: args.includes('--all'),
        compareSpeed: args.includes('--compare-speed')
      };
      await benchmarkProviders(options);
      break;

    case 'status':
      const statusOptions = {
        detailed: args.includes('--detailed'),
        showCosts: args.includes('--show-costs')
      };
      await showDetailedStatus(statusOptions);
      break;

    case 'config':
      const configCommand = args[0];
      switch (configCommand) {
        case 'validate':
          validateConfig();
          break;
        case 'backup':
          backupConfig();
          break;
        default:
          console.log(chalk.yellow('Available config commands:'));
          console.log('  validate - Check configuration validity');
          console.log('  backup   - Backup current configuration');
      }
      break;

    default:
      console.log(chalk.blue('Claude Code Router - Advanced CLI'));
      console.log(chalk.gray('─'.repeat(40)));
      console.log(chalk.yellow('Available commands:'));
      console.log('');
      console.log('Testing & Benchmarking:');
      console.log('  ccr test <provider> [model]              - Test provider connection');
      console.log('  ccr benchmark [--all] [--compare-speed]  - Benchmark providers');
      console.log('  ccr status [--detailed] [--show-costs]   - Show router status');
      console.log('');
      console.log('Configuration Management:');
      console.log('  ccr config validate  - Validate configuration');
      console.log('  ccr config backup    - Backup configuration');
      console.log('');
      console.log('Examples:');
      console.log('  ccr test openai gpt-4o');
      console.log('  ccr benchmark --all --compare-speed');
      console.log('  ccr status --detailed --show-costs');
  }
}

if (require.main === module) {
  main().catch(console.error);
}

module.exports = {
  testProvider,
  benchmarkProviders,
  showDetailedStatus,
  validateConfig,
  backupConfig
};
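As with benchmark.js, the helpers here are exported and can be reused outside the `ccr` CLI. A short sketch follows; the require path is an assumption, and behaviour follows the code above (note that testProvider currently only verifies that the provider's API-key environment variable is set, it does not send a live request):

// Hypothetical programmatic use of the new commands module.
const { validateConfig, backupConfig, testProvider } = require('claude-code-router-config/cli/commands');

(async () => {
  backupConfig();                            // copies config.json / intent-router.js into ~/.claude-code-router/backups
  if (validateConfig()) {
    await testProvider('openai', 'gpt-4o');  // checks the API key env var only
  }
})();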