@sifwenf/cc-proxy 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.ja.md +337 -0
- package/README.md +284 -0
- package/README.zh-CN.md +337 -0
- package/TASKS.md +102 -0
- package/config.example.json +29 -0
- package/package.json +21 -0
- package/scripts/ccp +126 -0
- package/scripts/status.js +32 -0
- package/src/config.ts +86 -0
- package/src/format-converter.ts +168 -0
- package/src/logger.ts +213 -0
- package/src/proxy.ts +83 -0
- package/src/scripts/init.ts +161 -0
- package/src/server.ts +338 -0
- package/src/types.ts +78 -0
package/src/server.ts
ADDED
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
// Third-party
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger as honoLogger } from 'hono/logger';
import { streamText } from 'hono/streaming';
import chokidar from 'chokidar';

// Node stdlib
import { join } from 'path';

// Local modules
import { loadConfig, getDataDir, getDefaultConfigPath } from './config.js';
import { convertAnthropicToOpenAI, convertOpenAIToAnthropic } from './format-converter.js';
import { Logger } from './logger.js';
import { RequestMapper } from './proxy.js';
import type { Config } from './types.js';
|
|
11
|
+
|
|
12
|
+
const app = new Hono();
|
|
13
|
+
|
|
14
|
+
let config;
|
|
15
|
+
try {
|
|
16
|
+
config = loadConfig();
|
|
17
|
+
} catch (error) {
|
|
18
|
+
console.error('❌ Failed to load configuration:');
|
|
19
|
+
console.error((error as Error).message);
|
|
20
|
+
console.error('\nPlease create a config.json file. See config.example.json for reference.');
|
|
21
|
+
process.exit(1);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const appLogger = new Logger(config.logging);
|
|
25
|
+
const requestMapper = new RequestMapper(
|
|
26
|
+
config.providers,
|
|
27
|
+
config.router
|
|
28
|
+
);
|
|
29
|
+
|
|
30
|
+
// Hot reload configuration with chokidar
|
|
31
|
+
let configWatcher: chokidar.FSWatcher | null = null;
|
|
32
|
+
|
|
33
|
+
function setupConfigWatcher() {
|
|
34
|
+
const configPath = getDefaultConfigPath();
|
|
35
|
+
|
|
36
|
+
// Reload function
|
|
37
|
+
const reloadConfig = async () => {
|
|
38
|
+
console.log('🔄 Config file changed, reloading...');
|
|
39
|
+
try {
|
|
40
|
+
const newConfig = await require('./config.js').reloadConfig();
|
|
41
|
+
if (newConfig) {
|
|
42
|
+
// Update providers and router
|
|
43
|
+
(requestMapper as any).updateProviders(newConfig.providers);
|
|
44
|
+
(requestMapper as any).updateRouter(newConfig.router);
|
|
45
|
+
|
|
46
|
+
// Update logger
|
|
47
|
+
(appLogger as any).updateConfig(newConfig.logging);
|
|
48
|
+
|
|
49
|
+
config = newConfig;
|
|
50
|
+
console.log('✅ Configuration reloaded successfully');
|
|
51
|
+
console.log(` OPUS → ${config.router.opus}`);
|
|
52
|
+
console.log(` SONNET → ${config.router.sonnet}`);
|
|
53
|
+
console.log(` HAIKU → ${config.router.haiku}`);
|
|
54
|
+
}
|
|
55
|
+
} catch (err) {
|
|
56
|
+
console.error('❌ Failed to reload config:', err);
|
|
57
|
+
}
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
try {
|
|
61
|
+
// Use chokidar for reliable file watching across platforms
|
|
62
|
+
configWatcher = chokidar.watch(configPath, {
|
|
63
|
+
persistent: true,
|
|
64
|
+
ignoreInitial: true,
|
|
65
|
+
awaitWriteFinish: {
|
|
66
|
+
stabilityThreshold: 100,
|
|
67
|
+
pollInterval: 50,
|
|
68
|
+
},
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
configWatcher.on('change', reloadConfig);
|
|
72
|
+
configWatcher.on('error', (error) => {
|
|
73
|
+
console.error('❌ Watcher error:', error);
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
console.log(`👀 Watching config file: ${configPath}`);
|
|
77
|
+
console.log('💡 Edit config file to automatically reload configuration');
|
|
78
|
+
} catch (err) {
|
|
79
|
+
console.warn('⚠️ Could not setup config watcher:', err);
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
// Middleware
|
|
84
|
+
app.use('*', cors({
|
|
85
|
+
origin: '*',
|
|
86
|
+
allowHeaders: ['*'],
|
|
87
|
+
allowMethods: ['POST', 'GET', 'OPTIONS'],
|
|
88
|
+
credentials: true,
|
|
89
|
+
}));
|
|
90
|
+
|
|
91
|
+
app.use('*', honoLogger());
|
|
92
|
+
|
|
93
|
+
// Health check endpoint
|
|
94
|
+
app.get('/health', (c) => {
|
|
95
|
+
return c.json({
|
|
96
|
+
status: 'ok',
|
|
97
|
+
providers: config.providers.length,
|
|
98
|
+
logging: config.logging.enabled,
|
|
99
|
+
});
|
|
100
|
+
});
|
|
101
|
+
|
|
102
|
+
// Status endpoint
|
|
103
|
+
app.get('/status', (c) => {
|
|
104
|
+
return c.json({
|
|
105
|
+
server: config.server,
|
|
106
|
+
providers: config.providers.map(p => ({
|
|
107
|
+
name: p.name,
|
|
108
|
+
baseUrl: p.baseUrl,
|
|
109
|
+
})),
|
|
110
|
+
router: config.router,
|
|
111
|
+
logging: config.logging,
|
|
112
|
+
});
|
|
113
|
+
});
|
|
114
|
+
|
|
115
|
+
// Main proxy endpoint for Anthropic Messages API.
//
// Flow: log the incoming request → resolve provider/model from the router →
// build the upstream request in the provider's wire format (openai /
// anthropic / pass-through) → forward → relay the response, converting
// OpenAI-shaped non-streaming responses back to Anthropic shape.
app.post('/v1/messages', async (c) => {
  // Set once logging has started, so the catch block only logs errors for
  // requests that were actually recorded.
  let requestId: string | null = null;

  try {
    const body = await c.req.json();
    const headers = Object.fromEntries(c.req.raw.headers);

    // Log request
    requestId = appLogger.startRequest(
      'POST',
      '/v1/messages',
      headers,
      body
    );

    // Resolve provider and model mapping
    const context = requestMapper.resolveProvider(body.model);
    const { provider, modelName } = context;

    // Normalize format to lowercase
    const format = provider.format?.toLowerCase();

    // Prepare request body and headers based on provider format
    let providerRequest: any;
    let fetchHeaders: Record<string, string>;

    if (format === 'openai') {
      // Convert Anthropic → OpenAI format
      providerRequest = convertAnthropicToOpenAI({ ...body, model: modelName });
      fetchHeaders = {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${provider.apiKey}`,
        'HTTP-Referer': 'https://claude.ai',
        'X-Title': 'Claude Code',
      };
    } else if (format === 'anthropic') {
      // Explicit Anthropic format headers
      providerRequest = { ...body, model: modelName };
      fetchHeaders = {
        'Content-Type': 'application/json',
        'x-api-key': provider.apiKey,
        'anthropic-version': '2023-06-01',
      };
    } else {
      // No format specified: pass-through proxy
      // Forward original headers, only replace api key and model
      const originalHeaders = Object.fromEntries(c.req.raw.headers);
      fetchHeaders = { ...originalHeaders };
      // Remove hop-by-hop headers
      delete fetchHeaders['host'];
      delete fetchHeaders['connection'];
      delete fetchHeaders['content-length'];
      // Set provider's API key
      if (provider.apiKey) {
        fetchHeaders['x-api-key'] = provider.apiKey;
      }
      providerRequest = { ...body, model: modelName };
    }

    // Forward request to provider
    // Log forward details before sending (only once, not verbose for streaming)
    if (!body.stream) {
      appLogger.logForward(
        provider.name,
        modelName,
        format || 'pass-through',
        providerRequest
      );
    }

    // NOTE(review): provider.baseUrl is used as the complete endpoint URL
    // (no path is appended) — confirm configs store the full messages URL.
    const response = await fetch(provider.baseUrl, {
      method: 'POST',
      headers: fetchHeaders,
      body: JSON.stringify(providerRequest),
    });

    if (!response.ok) {
      const errorText = await response.text();
      appLogger.logError(
        `Provider error: ${response.status} ${response.statusText}`,
        errorText
      );
      // Relay upstream failures in Anthropic's error envelope, preserving
      // the upstream status code.
      return c.json(
        {
          type: 'error',
          error: {
            type: 'api_error',
            message: `Provider returned ${response.status}: ${errorText}`,
          },
        },
        response.status
      );
    }

    // Handle streaming response
    if (body.stream) {
      // Performance optimization: Skip per-chunk logging for streaming
      // Each SSE chunk would trigger disk I/O, killing performance
      // Instead, log a single summary after stream completes
      //
      // NOTE(review): streamText responds with a text/plain content type;
      // SSE clients may expect text/event-stream — confirm, and consider
      // streamSSE if so.
      // NOTE(review): for format === 'openai' the upstream SSE chunks are
      // relayed unconverted (OpenAI wire shape) — confirm the client
      // tolerates this; non-streaming responses ARE converted below.
      return streamText(c, async (stream) => {
        const reader = response.body?.getReader();
        if (!reader) {
          throw new Error('No response body');
        }

        const decoder = new TextDecoder();
        let chunkCount = 0;

        try {
          while (true) {
            const { done, value } = await reader.read();
            if (done) break;

            const chunk = decoder.decode(value, { stream: true });
            chunkCount++;

            // Stream directly without logging each chunk
            await stream.write(chunk);
          }
        } finally {
          reader.releaseLock();

          // Log streaming summary (only if verbose)
          if (config.logging.level === 'verbose') {
            appLogger.logForward(
              provider.name,
              modelName,
              format || 'pass-through',
              providerRequest,
              { streaming: true, chunks: chunkCount }
            );
          }
        }
      });
    }

    // Handle non-streaming response
    let responseBody = await response.json();

    // Convert OpenAI format to Anthropic format if needed. The `choices`
    // check guards against providers that already answer in Anthropic shape.
    if (format === 'openai' && responseBody.choices) {
      responseBody = convertOpenAIToAnthropic(responseBody, body.model);
    }

    // Log response details (combine forward + response into one call)
    appLogger.logResponse(
      response.status,
      Object.fromEntries(response.headers.entries()),
      responseBody,
      provider.name,
      modelName,
      body.model
    );

    return c.json(responseBody);

  } catch (error) {
    if (requestId) {
      appLogger.logError(
        error instanceof Error ? error.message : String(error)
      );
    }

    // Any local failure (bad JSON, unknown model, network error) is
    // reported as a 500 in Anthropic's error envelope.
    return c.json(
      {
        type: 'error',
        error: {
          type: 'api_error',
          message: error instanceof Error ? error.message : 'Internal server error',
        },
      },
      500
    );
  }
});
|
|
291
|
+
|
|
292
|
+
// Parse port from config (highest priority), then ANTHROPIC_BASE_URL, then default
|
|
293
|
+
function resolvePort(): number {
|
|
294
|
+
// Config file has highest priority
|
|
295
|
+
if (config.server?.port) {
|
|
296
|
+
return config.server.port;
|
|
297
|
+
}
|
|
298
|
+
// Environment variable as fallback
|
|
299
|
+
const baseUrl = process.env.ANTHROPIC_BASE_URL;
|
|
300
|
+
if (baseUrl) {
|
|
301
|
+
try {
|
|
302
|
+
const url = new URL(baseUrl);
|
|
303
|
+
const port = parseInt(url.port, 10);
|
|
304
|
+
if (port) return port;
|
|
305
|
+
} catch {}
|
|
306
|
+
}
|
|
307
|
+
return 3457; // Default port
|
|
308
|
+
}
|
|
309
|
+
|
|
310
|
+
const PORT = resolvePort();
|
|
311
|
+
const HOST = config.server.host || '127.0.0.1';
|
|
312
|
+
|
|
313
|
+
const server = Bun.serve({
|
|
314
|
+
port: PORT,
|
|
315
|
+
hostname: HOST,
|
|
316
|
+
fetch: app.fetch,
|
|
317
|
+
});
|
|
318
|
+
|
|
319
|
+
console.log(`🚀 cc-proxy server running on http://${HOST}:${PORT}`);
|
|
320
|
+
console.log(`📊 Status: http://${HOST}:${PORT}/status`);
|
|
321
|
+
console.log(`🔍 Health: http://${HOST}:${PORT}/health`);
|
|
322
|
+
console.log(`📝 Logging: ${config.logging.enabled ? 'enabled' : 'disabled'} (${config.logging.level})`);
|
|
323
|
+
console.log(`⚙️ Providers: ${config.providers.length} configured`);
|
|
324
|
+
console.log(`📁 Config: ${require('./config.js').getDefaultConfigPath()}`);
|
|
325
|
+
|
|
326
|
+
// Setup config watcher for hot reload (only once)
|
|
327
|
+
setupConfigWatcher();
|
|
328
|
+
|
|
329
|
+
// Graceful shutdown: flush log buffer on exit
|
|
330
|
+
process.on('exit', () => (appLogger as any).destroy?.());
|
|
331
|
+
process.on('SIGINT', () => {
|
|
332
|
+
(appLogger as any).destroy?.();
|
|
333
|
+
process.exit(0);
|
|
334
|
+
});
|
|
335
|
+
process.on('SIGTERM', () => {
|
|
336
|
+
(appLogger as any).destroy?.();
|
|
337
|
+
process.exit(0);
|
|
338
|
+
});
|
package/src/types.ts
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
// Configuration types

/** One upstream LLM provider the proxy can forward requests to. */
export interface ProviderConfig {
  name: string;
  // NOTE(review): used directly as the fetch URL in server.ts — appears to
  // be the full endpoint URL, not a prefix; confirm naming vs. usage.
  baseUrl: string;
  apiKey: string;
  format?: string; // "anthropic" | "openai" | undefined (pass-through)
}

/**
 * Maps Claude model tiers to upstream targets. Each value is a
 * "providerName,modelName" pair.
 */
export interface RouterConfig {
  haiku: string; // Format: "providerName,modelName" e.g., "zp,glm-4.7"
  sonnet: string; // Format: "providerName,modelName"
  opus: string; // Format: "providerName,modelName"
  image?: string; // Optional: "providerName,modelName"
  webSearch?: string | number; // Optional: "providerName,modelName" or threshold
}

/** Controls request/response logging (consumed by Logger). */
export interface LoggingConfig {
  enabled: boolean;
  level: 'basic' | 'standard' | 'verbose';
  dir: string; // Directory log files are written to.
}

/** Listen address for the local proxy server. */
export interface ServerConfig {
  port: number;
  host: string;
}

/** Root shape of config.json. */
export interface Config {
  server: ServerConfig;
  logging: LoggingConfig;
  providers: ProviderConfig[];
  router: RouterConfig;
}
|
|
34
|
+
|
|
35
|
+
// Log entry types

/**
 * One structured record emitted by the Logger. Which optional fields are
 * populated depends on `type`.
 */
export interface LogEntry {
  timestamp: string;
  id: string; // Presumably correlates entries belonging to one request — confirm in logger.ts.
  type: 'request' | 'response' | 'stream_chunk' | 'error' | 'forward';
  method: string;
  path: string;
  statusCode?: number;
  headers?: Record<string, string>;
  body?: any;
  chunkIndex?: number; // Position of a chunk within a streamed response.
  provider?: string; // Name of the upstream provider that handled the call.
  mappedModel?: string; // Model name after router mapping.
  originalModel?: string; // Model name as sent by the client.
  duration?: number; // Presumably milliseconds — confirm against logger.ts.
  error?: string;
  // Forward details
  requestFormat?: string; // e.g. "openai", "anthropic", or "pass-through".
  actualRequest?: any; // The request body actually sent upstream.
  actualResponse?: any; // The response body received from upstream.
}
|
|
56
|
+
|
|
57
|
+
// Anthropic API types

/** Request body of the Anthropic Messages API (the subset this proxy reads). */
export interface AnthropicMessageRequest {
  model: string;
  messages: Array<{
    role: string;
    content: string | Array<any>;
  }>;
  max_tokens?: number;
  temperature?: number;
  top_p?: number;
  stream?: boolean; // When true, server.ts relays the upstream body as a stream.
  tools?: any;
  tool_choice?: any;
  system?: string;
}

/** One event in an Anthropic streaming response. */
export interface AnthropicStreamChunk {
  type: string;
  index?: number;
  delta?: any;
  message?: any;
}
|