aicodeswitch 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +176 -0
- package/bin/cli.js +29 -0
- package/bin/restart.js +232 -0
- package/bin/start.js +166 -0
- package/bin/stop.js +90 -0
- package/dist/server/auth.js +77 -0
- package/dist/server/database.js +480 -0
- package/dist/server/main.js +382 -0
- package/dist/server/proxy-server.js +889 -0
- package/dist/server/transformers/chunk-collector.js +39 -0
- package/dist/server/transformers/claude-openai.js +231 -0
- package/dist/server/transformers/openai-responses.js +392 -0
- package/dist/server/transformers/streaming.js +888 -0
- package/dist/types/index.js +2 -0
- package/dist/ui/assets/index-BN77E7-U.js +259 -0
- package/dist/ui/assets/index-CaNSVfpD.css +1 -0
- package/dist/ui/index.html +13 -0
- package/package.json +59 -0
|
@@ -0,0 +1,889 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// TypeScript-emitted helper: drives a generator as an async function so that
// downlevel `await` works (each `yield` awaits a promise, the generator's
// return value resolves the outer promise).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in a promise of the target constructor P (defaults to Promise).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator: settle when done, otherwise await the yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
11
|
+
// TypeScript-emitted helper: make a CommonJS export usable via `.default`
// (already-ESM modules pass through unchanged).
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
exports.ProxyServer = void 0;
|
|
16
|
+
const axios_1 = __importDefault(require("axios"));
|
|
17
|
+
const stream_1 = require("stream");
|
|
18
|
+
const streaming_1 = require("./transformers/streaming");
|
|
19
|
+
const chunk_collector_1 = require("./transformers/chunk-collector");
|
|
20
|
+
const claude_openai_1 = require("./transformers/claude-openai");
|
|
21
|
+
const openai_responses_1 = require("./transformers/openai-responses");
|
|
22
|
+
class ProxyServer {
|
|
23
|
+
constructor(dbManager, app) {
|
|
24
|
+
Object.defineProperty(this, "app", {
|
|
25
|
+
enumerable: true,
|
|
26
|
+
configurable: true,
|
|
27
|
+
writable: true,
|
|
28
|
+
value: void 0
|
|
29
|
+
});
|
|
30
|
+
Object.defineProperty(this, "dbManager", {
|
|
31
|
+
enumerable: true,
|
|
32
|
+
configurable: true,
|
|
33
|
+
writable: true,
|
|
34
|
+
value: void 0
|
|
35
|
+
});
|
|
36
|
+
Object.defineProperty(this, "routes", {
|
|
37
|
+
enumerable: true,
|
|
38
|
+
configurable: true,
|
|
39
|
+
writable: true,
|
|
40
|
+
value: []
|
|
41
|
+
});
|
|
42
|
+
Object.defineProperty(this, "rules", {
|
|
43
|
+
enumerable: true,
|
|
44
|
+
configurable: true,
|
|
45
|
+
writable: true,
|
|
46
|
+
value: new Map()
|
|
47
|
+
});
|
|
48
|
+
Object.defineProperty(this, "services", {
|
|
49
|
+
enumerable: true,
|
|
50
|
+
configurable: true,
|
|
51
|
+
writable: true,
|
|
52
|
+
value: new Map()
|
|
53
|
+
});
|
|
54
|
+
Object.defineProperty(this, "config", {
|
|
55
|
+
enumerable: true,
|
|
56
|
+
configurable: true,
|
|
57
|
+
writable: true,
|
|
58
|
+
value: void 0
|
|
59
|
+
});
|
|
60
|
+
this.dbManager = dbManager;
|
|
61
|
+
this.config = dbManager.getConfig();
|
|
62
|
+
this.app = app;
|
|
63
|
+
}
|
|
64
|
+
setupMiddleware() {
    // Registers the proxy middleware chain, in order:
    //   1. access logging, 2. legacy request logging (via res.send patch),
    //   3. fixed '/claude-code' and '/codex' handlers, 4. catch-all dynamic proxy.
    // Access logging middleware
    this.app.use((req, res, next) => __awaiter(this, void 0, void 0, function* () {
        var _a;
        // Capture client info
        // First entry of x-forwarded-for is the originating client when behind a proxy.
        const clientIp = ((_a = req.headers['x-forwarded-for']) === null || _a === void 0 ? void 0 : _a.split(',')[0]) || req.socket.remoteAddress || '';
        const userAgent = req.headers['user-agent'] || '';
        // NOTE(review): res.statusCode is read before any handler has run, so this
        // records the default 200, not the final response status — confirm intended.
        // The addAccessLog result is deliberately not awaited (fire-and-forget).
        this.dbManager.addAccessLog({
            timestamp: Date.now(),
            method: req.method,
            path: req.path,
            headers: this.normalizeHeaders(req.headers),
            body: req.body ? JSON.stringify(req.body) : undefined,
            clientIp,
            userAgent,
            statusCode: res.statusCode,
        });
        next();
    }));
    // Logging middleware (legacy RequestLog)
    this.app.use((req, res, next) => __awaiter(this, void 0, void 0, function* () {
        const startTime = Date.now();
        const originalSend = res.send.bind(res);
        // Patch res.send once so the log entry is written exactly when the
        // response body is first sent; the patch restores itself immediately.
        res.send = (data) => {
            var _a;
            res.send = originalSend;
            // Streaming/proxied responses set res.locals.skipLog and log elsewhere.
            if (!res.locals.skipLog && ((_a = this.config) === null || _a === void 0 ? void 0 : _a.enableLogging)) {
                const responseTime = Date.now() - startTime;
                this.dbManager.addLog({
                    timestamp: Date.now(),
                    method: req.method,
                    path: req.path,
                    headers: this.normalizeHeaders(req.headers),
                    body: req.body ? JSON.stringify(req.body) : undefined,
                    statusCode: res.statusCode,
                    responseTime,
                });
            }
            return res.send(data);
        };
        next();
    }));
    // Fixed route handlers
    // Both trailing-slash and bare variants are mounted for each prefix.
    this.app.use('/claude-code/', this.createFixedRouteHandler('claude-code'));
    this.app.use('/claude-code', this.createFixedRouteHandler('claude-code'));
    this.app.use('/codex/', this.createFixedRouteHandler('codex'));
    this.app.use('/codex', this.createFixedRouteHandler('codex'));
    // Dynamic proxy middleware
    // Catch-all fallback: resolve route -> rule -> service, then forward upstream.
    this.app.use((req, res, _next) => __awaiter(this, void 0, void 0, function* () {
        var _a;
        try {
            const route = this.findMatchingRoute(req);
            if (!route) {
                return res.status(404).json({ error: 'No matching route found' });
            }
            const rule = this.findMatchingRule(route.id, req);
            if (!rule) {
                return res.status(404).json({ error: 'No matching rule found' });
            }
            const service = this.services.get(rule.targetServiceId);
            if (!service) {
                return res.status(500).json({ error: 'Target service not configured' });
            }
            yield this.proxyRequest(req, res, route, rule, service);
        }
        catch (error) {
            console.error('Proxy error:', error);
            // Legacy request log (only when logging is enabled in config).
            if ((_a = this.config) === null || _a === void 0 ? void 0 : _a.enableLogging) {
                yield this.dbManager.addLog({
                    timestamp: Date.now(),
                    method: req.method,
                    path: req.path,
                    headers: this.normalizeHeaders(req.headers),
                    body: req.body ? JSON.stringify(req.body) : undefined,
                    error: error.message,
                });
            }
            // Add error log
            // Error log is always written, independent of enableLogging.
            yield this.dbManager.addErrorLog({
                timestamp: Date.now(),
                method: req.method,
                path: req.path,
                statusCode: 500,
                errorMessage: error.message,
                errorStack: error.stack,
                requestHeaders: this.normalizeHeaders(req.headers),
                requestBody: req.body ? JSON.stringify(req.body) : undefined,
            });
            res.status(500).json({ error: error.message });
        }
    }));
}
|
|
156
|
+
createFixedRouteHandler(targetType) {
|
|
157
|
+
return (req, res) => __awaiter(this, void 0, void 0, function* () {
|
|
158
|
+
var _a;
|
|
159
|
+
try {
|
|
160
|
+
// 检查API Key验证
|
|
161
|
+
if (this.config.apiKey) {
|
|
162
|
+
const authHeader = req.headers.authorization;
|
|
163
|
+
const providedKey = authHeader === null || authHeader === void 0 ? void 0 : authHeader.replace('Bearer ', '');
|
|
164
|
+
if (!providedKey || providedKey !== this.config.apiKey) {
|
|
165
|
+
return res.status(401).json({ error: 'Invalid API key' });
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
const route = this.findRouteByTargetType(targetType);
|
|
169
|
+
if (!route) {
|
|
170
|
+
return res.status(404).json({ error: `No active route found for target type: ${targetType}` });
|
|
171
|
+
}
|
|
172
|
+
const rule = this.findMatchingRule(route.id, req);
|
|
173
|
+
if (!rule) {
|
|
174
|
+
return res.status(404).json({ error: 'No matching rule found' });
|
|
175
|
+
}
|
|
176
|
+
const service = this.services.get(rule.targetServiceId);
|
|
177
|
+
if (!service) {
|
|
178
|
+
return res.status(500).json({ error: 'Target service not configured' });
|
|
179
|
+
}
|
|
180
|
+
yield this.proxyRequest(req, res, route, rule, service);
|
|
181
|
+
}
|
|
182
|
+
catch (error) {
|
|
183
|
+
console.error(`Fixed route error for ${targetType}:`, error);
|
|
184
|
+
if ((_a = this.config) === null || _a === void 0 ? void 0 : _a.enableLogging) {
|
|
185
|
+
yield this.dbManager.addLog({
|
|
186
|
+
timestamp: Date.now(),
|
|
187
|
+
method: req.method,
|
|
188
|
+
path: req.path,
|
|
189
|
+
headers: this.normalizeHeaders(req.headers),
|
|
190
|
+
body: req.body ? JSON.stringify(req.body) : undefined,
|
|
191
|
+
error: error.message,
|
|
192
|
+
});
|
|
193
|
+
}
|
|
194
|
+
// Add error log
|
|
195
|
+
yield this.dbManager.addErrorLog({
|
|
196
|
+
timestamp: Date.now(),
|
|
197
|
+
method: req.method,
|
|
198
|
+
path: req.path,
|
|
199
|
+
statusCode: 500,
|
|
200
|
+
errorMessage: error.message,
|
|
201
|
+
errorStack: error.stack,
|
|
202
|
+
requestHeaders: this.normalizeHeaders(req.headers),
|
|
203
|
+
requestBody: req.body ? JSON.stringify(req.body) : undefined,
|
|
204
|
+
});
|
|
205
|
+
res.status(500).json({ error: error.message });
|
|
206
|
+
}
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
findMatchingRoute(_req) {
|
|
210
|
+
// Find active route based on targetType - for now, return the first active route
|
|
211
|
+
// This can be extended later based on specific routing logic
|
|
212
|
+
return this.routes.find(route => route.isActive);
|
|
213
|
+
}
|
|
214
|
+
findRouteByTargetType(targetType) {
|
|
215
|
+
return this.routes.find(route => route.targetType === targetType && route.isActive);
|
|
216
|
+
}
|
|
217
|
+
findMatchingRule(routeId, req) {
|
|
218
|
+
const rules = this.rules.get(routeId);
|
|
219
|
+
if (!rules)
|
|
220
|
+
return undefined;
|
|
221
|
+
// Determine content type from request
|
|
222
|
+
const contentType = this.determineContentType(req);
|
|
223
|
+
return rules.find(rule => rule.contentType === contentType) || rules.find(rule => rule.contentType === 'default');
|
|
224
|
+
}
|
|
225
|
+
determineContentType(req) {
|
|
226
|
+
const body = req.body;
|
|
227
|
+
if (!body)
|
|
228
|
+
return 'default';
|
|
229
|
+
const explicitType = this.getExplicitContentType(req, body);
|
|
230
|
+
if (explicitType) {
|
|
231
|
+
return explicitType;
|
|
232
|
+
}
|
|
233
|
+
for (const detector of this.getContentTypeDetectors()) {
|
|
234
|
+
if (detector.match(req, body)) {
|
|
235
|
+
return detector.type;
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
return 'default';
|
|
239
|
+
}
|
|
240
|
+
getContentTypeDetectors() {
|
|
241
|
+
return [
|
|
242
|
+
{
|
|
243
|
+
type: 'image-understanding',
|
|
244
|
+
match: (_req, body) => this.containsImageContent(body.messages) || this.containsImageContent(body.input),
|
|
245
|
+
},
|
|
246
|
+
{
|
|
247
|
+
type: 'thinking',
|
|
248
|
+
match: (_req, body) => this.hasThinkingSignal(body),
|
|
249
|
+
},
|
|
250
|
+
{
|
|
251
|
+
type: 'long-context',
|
|
252
|
+
match: (_req, body) => this.hasLongContextSignal(body),
|
|
253
|
+
},
|
|
254
|
+
{
|
|
255
|
+
type: 'background',
|
|
256
|
+
match: (_req, body) => this.hasBackgroundSignal(body),
|
|
257
|
+
},
|
|
258
|
+
];
|
|
259
|
+
}
|
|
260
|
+
getExplicitContentType(req, body) {
|
|
261
|
+
var _a, _b, _c, _d, _e, _f;
|
|
262
|
+
const headerKeys = ['x-aicodeswitch-content-type', 'x-content-type', 'x-request-type', 'x-object-type'];
|
|
263
|
+
const queryKeys = ['contentType', 'content_type', 'requestType', 'request_type', 'objectType', 'object_type'];
|
|
264
|
+
const bodyKeys = ['contentType', 'content_type', 'requestType', 'request_type', 'objectType', 'object_type', 'mode'];
|
|
265
|
+
for (const key of headerKeys) {
|
|
266
|
+
const raw = req.headers[key];
|
|
267
|
+
if (typeof raw === 'string') {
|
|
268
|
+
const normalized = this.normalizeContentType(raw);
|
|
269
|
+
if (normalized)
|
|
270
|
+
return normalized;
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
for (const key of queryKeys) {
|
|
274
|
+
const raw = req.query[key];
|
|
275
|
+
if (typeof raw === 'string') {
|
|
276
|
+
const normalized = this.normalizeContentType(raw);
|
|
277
|
+
if (normalized)
|
|
278
|
+
return normalized;
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
for (const key of bodyKeys) {
|
|
282
|
+
const raw = body === null || body === void 0 ? void 0 : body[key];
|
|
283
|
+
if (typeof raw === 'string') {
|
|
284
|
+
const normalized = this.normalizeContentType(raw);
|
|
285
|
+
if (normalized)
|
|
286
|
+
return normalized;
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
const metaCandidates = [
|
|
290
|
+
(_a = body === null || body === void 0 ? void 0 : body.metadata) === null || _a === void 0 ? void 0 : _a.contentType,
|
|
291
|
+
(_b = body === null || body === void 0 ? void 0 : body.metadata) === null || _b === void 0 ? void 0 : _b.content_type,
|
|
292
|
+
(_c = body === null || body === void 0 ? void 0 : body.metadata) === null || _c === void 0 ? void 0 : _c.requestType,
|
|
293
|
+
(_d = body === null || body === void 0 ? void 0 : body.metadata) === null || _d === void 0 ? void 0 : _d.request_type,
|
|
294
|
+
(_e = body === null || body === void 0 ? void 0 : body.meta) === null || _e === void 0 ? void 0 : _e.contentType,
|
|
295
|
+
(_f = body === null || body === void 0 ? void 0 : body.meta) === null || _f === void 0 ? void 0 : _f.content_type,
|
|
296
|
+
];
|
|
297
|
+
for (const raw of metaCandidates) {
|
|
298
|
+
if (typeof raw === 'string') {
|
|
299
|
+
const normalized = this.normalizeContentType(raw);
|
|
300
|
+
if (normalized)
|
|
301
|
+
return normalized;
|
|
302
|
+
}
|
|
303
|
+
}
|
|
304
|
+
return null;
|
|
305
|
+
}
|
|
306
|
+
normalizeContentType(raw) {
|
|
307
|
+
const normalized = raw.trim().toLowerCase();
|
|
308
|
+
const mapping = {
|
|
309
|
+
default: 'default',
|
|
310
|
+
background: 'background',
|
|
311
|
+
bg: 'background',
|
|
312
|
+
thinking: 'thinking',
|
|
313
|
+
reasoning: 'thinking',
|
|
314
|
+
'long-context': 'long-context',
|
|
315
|
+
long_context: 'long-context',
|
|
316
|
+
long: 'long-context',
|
|
317
|
+
image: 'image-understanding',
|
|
318
|
+
image_understanding: 'image-understanding',
|
|
319
|
+
'image-understanding': 'image-understanding',
|
|
320
|
+
vision: 'image-understanding',
|
|
321
|
+
};
|
|
322
|
+
return mapping[normalized] || null;
|
|
323
|
+
}
|
|
324
|
+
containsImageContent(payload) {
|
|
325
|
+
var _a;
|
|
326
|
+
if (!payload)
|
|
327
|
+
return false;
|
|
328
|
+
const messages = Array.isArray(payload) ? payload : [payload];
|
|
329
|
+
for (const message of messages) {
|
|
330
|
+
const content = (_a = message === null || message === void 0 ? void 0 : message.content) !== null && _a !== void 0 ? _a : message;
|
|
331
|
+
if (Array.isArray(content)) {
|
|
332
|
+
for (const block of content) {
|
|
333
|
+
if (!block || typeof block !== 'object')
|
|
334
|
+
continue;
|
|
335
|
+
const type = block.type;
|
|
336
|
+
if (type === 'image' || type === 'image_url' || type === 'input_image') {
|
|
337
|
+
return true;
|
|
338
|
+
}
|
|
339
|
+
if (block.image_url) {
|
|
340
|
+
return true;
|
|
341
|
+
}
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
return false;
|
|
346
|
+
}
|
|
347
|
+
hasThinkingSignal(body) {
|
|
348
|
+
var _a, _b;
|
|
349
|
+
return Boolean((body === null || body === void 0 ? void 0 : body.reasoning) ||
|
|
350
|
+
(body === null || body === void 0 ? void 0 : body.thinking) ||
|
|
351
|
+
(body === null || body === void 0 ? void 0 : body.reasoning_effort) ||
|
|
352
|
+
((_a = body === null || body === void 0 ? void 0 : body.reasoning) === null || _a === void 0 ? void 0 : _a.effort) ||
|
|
353
|
+
((_b = body === null || body === void 0 ? void 0 : body.reasoning) === null || _b === void 0 ? void 0 : _b.enabled));
|
|
354
|
+
}
|
|
355
|
+
hasBackgroundSignal(body) {
|
|
356
|
+
var _a, _b, _c;
|
|
357
|
+
const candidates = [
|
|
358
|
+
body === null || body === void 0 ? void 0 : body.background,
|
|
359
|
+
(_a = body === null || body === void 0 ? void 0 : body.metadata) === null || _a === void 0 ? void 0 : _a.background,
|
|
360
|
+
(_b = body === null || body === void 0 ? void 0 : body.meta) === null || _b === void 0 ? void 0 : _b.background,
|
|
361
|
+
body === null || body === void 0 ? void 0 : body.priority,
|
|
362
|
+
(_c = body === null || body === void 0 ? void 0 : body.metadata) === null || _c === void 0 ? void 0 : _c.priority,
|
|
363
|
+
body === null || body === void 0 ? void 0 : body.mode,
|
|
364
|
+
];
|
|
365
|
+
return candidates.some((value) => value === true || value === 'background');
|
|
366
|
+
}
|
|
367
|
+
hasLongContextSignal(body) {
|
|
368
|
+
var _a, _b;
|
|
369
|
+
const explicit = [
|
|
370
|
+
body === null || body === void 0 ? void 0 : body.long_context,
|
|
371
|
+
body === null || body === void 0 ? void 0 : body.longContext,
|
|
372
|
+
(_a = body === null || body === void 0 ? void 0 : body.metadata) === null || _a === void 0 ? void 0 : _a.long_context,
|
|
373
|
+
(_b = body === null || body === void 0 ? void 0 : body.metadata) === null || _b === void 0 ? void 0 : _b.longContext,
|
|
374
|
+
];
|
|
375
|
+
if (explicit.some((value) => value === true)) {
|
|
376
|
+
return true;
|
|
377
|
+
}
|
|
378
|
+
const maxTokens = this.extractNumericField(body, [
|
|
379
|
+
'max_tokens',
|
|
380
|
+
'max_output_tokens',
|
|
381
|
+
'max_completion_tokens',
|
|
382
|
+
'max_context_tokens',
|
|
383
|
+
]);
|
|
384
|
+
if (maxTokens !== null && maxTokens >= 8000) {
|
|
385
|
+
return true;
|
|
386
|
+
}
|
|
387
|
+
const contentLength = this.estimateTextLength(body);
|
|
388
|
+
return contentLength >= 12000;
|
|
389
|
+
}
|
|
390
|
+
extractNumericField(body, fields) {
|
|
391
|
+
for (const field of fields) {
|
|
392
|
+
const value = body === null || body === void 0 ? void 0 : body[field];
|
|
393
|
+
if (typeof value === 'number' && Number.isFinite(value)) {
|
|
394
|
+
return value;
|
|
395
|
+
}
|
|
396
|
+
if (typeof value === 'string') {
|
|
397
|
+
const parsed = Number(value);
|
|
398
|
+
if (Number.isFinite(parsed)) {
|
|
399
|
+
return parsed;
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
return null;
|
|
404
|
+
}
|
|
405
|
+
estimateTextLength(body) {
    // Rough size estimate of all prompt text in the payload, in characters.
    // Sums string lengths across messages[], input (string | array | object),
    // system, instructions, and prompt. Used as a long-context heuristic.
    var _a, _b;
    let length = 0;
    // Accumulate plain strings only; everything else is ignored.
    const addText = (value) => {
        if (typeof value === 'string') {
            length += value.length;
        }
    };
    // Handle a message `content` value: plain string, or an array of parts
    // whose `text` / `content` string fields are counted.
    const addContent = (content) => {
        if (typeof content === 'string' || content === null) {
            addText(content);
            return;
        }
        if (Array.isArray(content)) {
            for (const part of content) {
                if (typeof part === 'string') {
                    addText(part);
                    continue;
                }
                if (part && typeof part === 'object') {
                    if (typeof part.text === 'string') {
                        addText(part.text);
                    }
                    if (typeof part.content === 'string') {
                        addText(part.content);
                    }
                }
            }
        }
    };
    // Claude/OpenAI chat shape: messages[].content
    if (Array.isArray(body === null || body === void 0 ? void 0 : body.messages)) {
        for (const message of body.messages) {
            addContent(message === null || message === void 0 ? void 0 : message.content);
        }
    }
    // OpenAI Responses shape: input may be a string, an array of messages,
    // or a single message object.
    if (body === null || body === void 0 ? void 0 : body.input) {
        if (typeof body.input === 'string') {
            addText(body.input);
        }
        else if (Array.isArray(body.input)) {
            for (const message of body.input) {
                if (typeof message === 'string') {
                    addText(message);
                }
                else if (message && typeof message === 'object') {
                    addContent((_a = message.content) !== null && _a !== void 0 ? _a : message);
                }
            }
        }
        else if (body.input && typeof body.input === 'object') {
            addContent((_b = body.input.content) !== null && _b !== void 0 ? _b : body.input);
        }
    }
    // System prompt may be a string or a content-block array; the rest are strings.
    addContent(body === null || body === void 0 ? void 0 : body.system);
    addText(body === null || body === void 0 ? void 0 : body.instructions);
    addText(body === null || body === void 0 ? void 0 : body.prompt);
    return length;
}
|
|
463
|
+
isClaudeSource(sourceType) {
|
|
464
|
+
return sourceType === 'claude-chat' || sourceType === 'claude-code';
|
|
465
|
+
}
|
|
466
|
+
isOpenAIChatSource(sourceType) {
|
|
467
|
+
return sourceType === 'openai-chat' || sourceType === 'openai-code' || sourceType === 'deepseek-chat';
|
|
468
|
+
}
|
|
469
|
+
isOpenAIResponsesSource(sourceType) {
|
|
470
|
+
return sourceType === 'openai-responses';
|
|
471
|
+
}
|
|
472
|
+
applyModelOverride(body, rule) {
|
|
473
|
+
if (!rule.targetModel)
|
|
474
|
+
return body;
|
|
475
|
+
if (body && typeof body === 'object') {
|
|
476
|
+
return Object.assign(Object.assign({}, body), { model: rule.targetModel });
|
|
477
|
+
}
|
|
478
|
+
return body;
|
|
479
|
+
}
|
|
480
|
+
isStreamRequested(req, body) {
|
|
481
|
+
const accept = typeof req.headers.accept === 'string' ? req.headers.accept : '';
|
|
482
|
+
return (body === null || body === void 0 ? void 0 : body.stream) === true || accept.includes('text/event-stream');
|
|
483
|
+
}
|
|
484
|
+
buildUpstreamHeaders(req, service, sourceType, streamRequested) {
|
|
485
|
+
const headers = {};
|
|
486
|
+
for (const [key, value] of Object.entries(req.headers)) {
|
|
487
|
+
if (['host', 'connection', 'content-length', 'authorization'].includes(key.toLowerCase())) {
|
|
488
|
+
continue;
|
|
489
|
+
}
|
|
490
|
+
if (typeof value === 'string') {
|
|
491
|
+
headers[key] = value;
|
|
492
|
+
}
|
|
493
|
+
else if (Array.isArray(value)) {
|
|
494
|
+
headers[key] = value.join(', ');
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
if (streamRequested) {
|
|
498
|
+
headers.accept = 'text/event-stream';
|
|
499
|
+
}
|
|
500
|
+
if (this.isClaudeSource(sourceType)) {
|
|
501
|
+
headers['x-api-key'] = service.apiKey;
|
|
502
|
+
headers['anthropic-version'] = headers['anthropic-version'] || '2023-06-01';
|
|
503
|
+
}
|
|
504
|
+
else {
|
|
505
|
+
delete headers['anthropic-version'];
|
|
506
|
+
delete headers['anthropic-beta'];
|
|
507
|
+
headers.authorization = `Bearer ${service.apiKey}`;
|
|
508
|
+
}
|
|
509
|
+
if (!headers['content-type']) {
|
|
510
|
+
headers['content-type'] = 'application/json';
|
|
511
|
+
}
|
|
512
|
+
return headers;
|
|
513
|
+
}
|
|
514
|
+
copyResponseHeaders(responseHeaders, res) {
|
|
515
|
+
Object.keys(responseHeaders).forEach((key) => {
|
|
516
|
+
if (!['content-encoding', 'transfer-encoding', 'connection', 'content-length'].includes(key.toLowerCase())) {
|
|
517
|
+
res.setHeader(key, responseHeaders[key]);
|
|
518
|
+
}
|
|
519
|
+
});
|
|
520
|
+
}
|
|
521
|
+
normalizeHeaders(headers) {
|
|
522
|
+
const normalized = {};
|
|
523
|
+
for (const [key, value] of Object.entries(headers)) {
|
|
524
|
+
if (typeof value === 'string') {
|
|
525
|
+
normalized[key] = value;
|
|
526
|
+
}
|
|
527
|
+
else if (Array.isArray(value)) {
|
|
528
|
+
normalized[key] = value.join(', ');
|
|
529
|
+
}
|
|
530
|
+
}
|
|
531
|
+
return normalized;
|
|
532
|
+
}
|
|
533
|
+
normalizeResponseHeaders(headers) {
|
|
534
|
+
const normalized = {};
|
|
535
|
+
for (const [key, value] of Object.entries(headers)) {
|
|
536
|
+
if (value !== null && value !== undefined) {
|
|
537
|
+
if (typeof value === 'string') {
|
|
538
|
+
normalized[key] = value;
|
|
539
|
+
}
|
|
540
|
+
else if (Array.isArray(value)) {
|
|
541
|
+
normalized[key] = value.join(', ');
|
|
542
|
+
}
|
|
543
|
+
else {
|
|
544
|
+
normalized[key] = String(value);
|
|
545
|
+
}
|
|
546
|
+
}
|
|
547
|
+
}
|
|
548
|
+
return normalized;
|
|
549
|
+
}
|
|
550
|
+
readStreamBody(stream) {
|
|
551
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
552
|
+
return new Promise((resolve, reject) => {
|
|
553
|
+
let data = '';
|
|
554
|
+
stream.on('data', (chunk) => {
|
|
555
|
+
data += chunk.toString();
|
|
556
|
+
});
|
|
557
|
+
stream.on('end', () => resolve(data));
|
|
558
|
+
stream.on('error', reject);
|
|
559
|
+
});
|
|
560
|
+
});
|
|
561
|
+
}
|
|
562
|
+
safeJsonParse(raw) {
|
|
563
|
+
try {
|
|
564
|
+
return JSON.parse(raw);
|
|
565
|
+
}
|
|
566
|
+
catch (_a) {
|
|
567
|
+
return null;
|
|
568
|
+
}
|
|
569
|
+
}
|
|
570
|
+
extractTokenUsage(usage) {
    // Normalize a provider usage object into the internal token-usage shape,
    // dispatching on which counter fields are present. Order matters: the
    // Responses-API shape also carries input/output_tokens, so it must be
    // checked (and excluded via prompt_tokens === undefined) before the
    // generic Claude-style input/output check below.
    if (!usage)
        return undefined;
    // OpenAI Responses API: input_tokens + output_tokens, no prompt_tokens.
    if (typeof usage.input_tokens === 'number' && typeof usage.output_tokens === 'number' && usage.prompt_tokens === undefined) {
        return (0, openai_responses_1.extractTokenUsageFromOpenAIResponsesUsage)(usage);
    }
    // OpenAI Chat Completions: prompt_tokens / completion_tokens.
    if (typeof usage.prompt_tokens === 'number' || typeof usage.completion_tokens === 'number') {
        return (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(usage);
    }
    // Claude: input_tokens / output_tokens (either may be present alone).
    if (typeof usage.input_tokens === 'number' || typeof usage.output_tokens === 'number') {
        return (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
    }
    // Unrecognized shape: report no usage rather than guessing.
    return undefined;
}
|
|
584
|
+
proxyRequest(req, res, route, rule, service) {
|
|
585
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
586
|
+
var _a;
|
|
587
|
+
res.locals.skipLog = true;
|
|
588
|
+
const startTime = Date.now();
|
|
589
|
+
const sourceType = (service.sourceType || 'openai-chat');
|
|
590
|
+
const targetType = route.targetType;
|
|
591
|
+
let requestBody = req.body || {};
|
|
592
|
+
let usageForLog;
|
|
593
|
+
let logged = false;
|
|
594
|
+
// 用于收集响应数据的变量
|
|
595
|
+
let responseHeadersForLog;
|
|
596
|
+
let responseBodyForLog;
|
|
597
|
+
let streamChunksForLog;
|
|
598
|
+
const finalizeLog = (statusCode, error) => __awaiter(this, void 0, void 0, function* () {
|
|
599
|
+
var _a;
|
|
600
|
+
if (logged || !((_a = this.config) === null || _a === void 0 ? void 0 : _a.enableLogging))
|
|
601
|
+
return;
|
|
602
|
+
logged = true;
|
|
603
|
+
yield this.dbManager.addLog({
|
|
604
|
+
timestamp: Date.now(),
|
|
605
|
+
method: req.method,
|
|
606
|
+
path: req.path,
|
|
607
|
+
headers: this.normalizeHeaders(req.headers),
|
|
608
|
+
body: req.body ? JSON.stringify(req.body) : undefined,
|
|
609
|
+
statusCode,
|
|
610
|
+
responseTime: Date.now() - startTime,
|
|
611
|
+
targetProvider: service.name,
|
|
612
|
+
usage: usageForLog,
|
|
613
|
+
error,
|
|
614
|
+
// 新增字段
|
|
615
|
+
targetType,
|
|
616
|
+
targetServiceId: service.id,
|
|
617
|
+
targetServiceName: service.name,
|
|
618
|
+
targetModel: rule.targetModel,
|
|
619
|
+
responseHeaders: responseHeadersForLog,
|
|
620
|
+
responseBody: responseBodyForLog,
|
|
621
|
+
streamChunks: streamChunksForLog,
|
|
622
|
+
});
|
|
623
|
+
});
|
|
624
|
+
try {
|
|
625
|
+
if (targetType === 'claude-code') {
|
|
626
|
+
if (this.isClaudeSource(sourceType)) {
|
|
627
|
+
requestBody = this.applyModelOverride(requestBody, rule);
|
|
628
|
+
}
|
|
629
|
+
else if (this.isOpenAIChatSource(sourceType)) {
|
|
630
|
+
requestBody = (0, claude_openai_1.transformClaudeRequestToOpenAIChat)(requestBody, rule.targetModel);
|
|
631
|
+
}
|
|
632
|
+
else if (this.isOpenAIResponsesSource(sourceType)) {
|
|
633
|
+
requestBody = (0, openai_responses_1.transformClaudeRequestToOpenAIResponses)(requestBody, rule.targetModel);
|
|
634
|
+
}
|
|
635
|
+
else {
|
|
636
|
+
res.status(400).json({ error: 'Unsupported source type for Claude Code.' });
|
|
637
|
+
yield finalizeLog(400, 'Unsupported source type for Claude Code');
|
|
638
|
+
return;
|
|
639
|
+
}
|
|
640
|
+
}
|
|
641
|
+
else if (targetType === 'codex') {
|
|
642
|
+
if (this.isOpenAIResponsesSource(sourceType)) {
|
|
643
|
+
requestBody = this.applyModelOverride(requestBody, rule);
|
|
644
|
+
}
|
|
645
|
+
else if (this.isOpenAIChatSource(sourceType)) {
|
|
646
|
+
requestBody = (0, openai_responses_1.transformOpenAIResponsesRequestToOpenAIChat)(requestBody, rule.targetModel);
|
|
647
|
+
}
|
|
648
|
+
else if (this.isClaudeSource(sourceType)) {
|
|
649
|
+
requestBody = (0, openai_responses_1.transformOpenAIResponsesRequestToClaude)(requestBody, rule.targetModel);
|
|
650
|
+
}
|
|
651
|
+
else {
|
|
652
|
+
res.status(400).json({ error: 'Codex requires an OpenAI Responses compatible source.' });
|
|
653
|
+
yield finalizeLog(400, 'Unsupported source type for Codex');
|
|
654
|
+
return;
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
const streamRequested = this.isStreamRequested(req, requestBody);
|
|
658
|
+
const config = {
|
|
659
|
+
method: req.method,
|
|
660
|
+
url: service.apiUrl,
|
|
661
|
+
headers: this.buildUpstreamHeaders(req, service, sourceType, streamRequested),
|
|
662
|
+
timeout: service.timeout || 30000,
|
|
663
|
+
validateStatus: () => true,
|
|
664
|
+
responseType: streamRequested ? 'stream' : 'json',
|
|
665
|
+
};
|
|
666
|
+
if (Object.keys(req.query).length > 0) {
|
|
667
|
+
config.params = req.query;
|
|
668
|
+
}
|
|
669
|
+
if (['POST', 'PUT', 'PATCH'].includes(req.method.toUpperCase())) {
|
|
670
|
+
config.data = requestBody;
|
|
671
|
+
}
|
|
672
|
+
const response = yield (0, axios_1.default)(config);
|
|
673
|
+
const responseHeaders = response.headers || {};
|
|
674
|
+
const contentType = typeof responseHeaders['content-type'] === 'string' ? responseHeaders['content-type'] : '';
|
|
675
|
+
const isEventStream = streamRequested && contentType.includes('text/event-stream');
|
|
676
|
+
if (isEventStream && response.data) {
|
|
677
|
+
res.status(response.status);
|
|
678
|
+
if (targetType === 'claude-code' && this.isOpenAIChatSource(sourceType)) {
|
|
679
|
+
res.setHeader('Content-Type', 'text/event-stream');
|
|
680
|
+
res.setHeader('Cache-Control', 'no-cache');
|
|
681
|
+
res.setHeader('Connection', 'keep-alive');
|
|
682
|
+
const parser = new streaming_1.SSEParserTransform();
|
|
683
|
+
const chunkCollector = new chunk_collector_1.ChunkCollectorTransform();
|
|
684
|
+
const converter = new streaming_1.OpenAIToClaudeEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
|
|
685
|
+
const serializer = new streaming_1.SSESerializerTransform();
|
|
686
|
+
// 收集响应头
|
|
687
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
688
|
+
res.on('finish', () => {
|
|
689
|
+
const usage = converter.getUsage();
|
|
690
|
+
if (usage) {
|
|
691
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
|
|
692
|
+
}
|
|
693
|
+
// 收集stream chunks
|
|
694
|
+
streamChunksForLog = chunkCollector.getChunks();
|
|
695
|
+
void finalizeLog(res.statusCode);
|
|
696
|
+
});
|
|
697
|
+
(0, stream_1.pipeline)(response.data, parser, chunkCollector, converter, serializer, res, (error) => {
|
|
698
|
+
if (error) {
|
|
699
|
+
void finalizeLog(500, error.message);
|
|
700
|
+
}
|
|
701
|
+
});
|
|
702
|
+
return;
|
|
703
|
+
}
|
|
704
|
+
if (targetType === 'claude-code' && this.isOpenAIResponsesSource(sourceType)) {
|
|
705
|
+
res.setHeader('Content-Type', 'text/event-stream');
|
|
706
|
+
res.setHeader('Cache-Control', 'no-cache');
|
|
707
|
+
res.setHeader('Connection', 'keep-alive');
|
|
708
|
+
const parser = new streaming_1.SSEParserTransform();
|
|
709
|
+
const chunkCollector = new chunk_collector_1.ChunkCollectorTransform();
|
|
710
|
+
const converter = new streaming_1.OpenAIResponsesToClaudeEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
|
|
711
|
+
const serializer = new streaming_1.SSESerializerTransform();
|
|
712
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
713
|
+
res.on('finish', () => {
|
|
714
|
+
const usage = converter.getUsage();
|
|
715
|
+
if (usage) {
|
|
716
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
|
|
717
|
+
}
|
|
718
|
+
streamChunksForLog = chunkCollector.getChunks();
|
|
719
|
+
void finalizeLog(res.statusCode);
|
|
720
|
+
});
|
|
721
|
+
(0, stream_1.pipeline)(response.data, parser, chunkCollector, converter, serializer, res, (error) => {
|
|
722
|
+
if (error) {
|
|
723
|
+
void finalizeLog(500, error.message);
|
|
724
|
+
}
|
|
725
|
+
});
|
|
726
|
+
return;
|
|
727
|
+
}
|
|
728
|
+
if (targetType === 'codex' && this.isClaudeSource(sourceType)) {
|
|
729
|
+
res.setHeader('Content-Type', 'text/event-stream');
|
|
730
|
+
res.setHeader('Cache-Control', 'no-cache');
|
|
731
|
+
res.setHeader('Connection', 'keep-alive');
|
|
732
|
+
const parser = new streaming_1.SSEParserTransform();
|
|
733
|
+
const chunkCollector = new chunk_collector_1.ChunkCollectorTransform();
|
|
734
|
+
const converter = new streaming_1.ClaudeToOpenAIResponsesEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
|
|
735
|
+
const serializer = new streaming_1.SSESerializerTransform();
|
|
736
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
737
|
+
res.on('finish', () => {
|
|
738
|
+
const usage = converter.getUsage();
|
|
739
|
+
if (usage) {
|
|
740
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
|
|
741
|
+
}
|
|
742
|
+
streamChunksForLog = chunkCollector.getChunks();
|
|
743
|
+
void finalizeLog(res.statusCode);
|
|
744
|
+
});
|
|
745
|
+
(0, stream_1.pipeline)(response.data, parser, chunkCollector, converter, serializer, res, (error) => {
|
|
746
|
+
if (error) {
|
|
747
|
+
void finalizeLog(500, error.message);
|
|
748
|
+
}
|
|
749
|
+
});
|
|
750
|
+
return;
|
|
751
|
+
}
|
|
752
|
+
if (targetType === 'codex' && this.isOpenAIChatSource(sourceType)) {
|
|
753
|
+
res.setHeader('Content-Type', 'text/event-stream');
|
|
754
|
+
res.setHeader('Cache-Control', 'no-cache');
|
|
755
|
+
res.setHeader('Connection', 'keep-alive');
|
|
756
|
+
const parser = new streaming_1.SSEParserTransform();
|
|
757
|
+
const chunkCollector = new chunk_collector_1.ChunkCollectorTransform();
|
|
758
|
+
const toClaude = new streaming_1.OpenAIToClaudeEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
|
|
759
|
+
const toResponses = new streaming_1.ClaudeToOpenAIResponsesEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
|
|
760
|
+
const serializer = new streaming_1.SSESerializerTransform();
|
|
761
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
762
|
+
res.on('finish', () => {
|
|
763
|
+
const usage = toResponses.getUsage();
|
|
764
|
+
if (usage) {
|
|
765
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
|
|
766
|
+
}
|
|
767
|
+
streamChunksForLog = chunkCollector.getChunks();
|
|
768
|
+
void finalizeLog(res.statusCode);
|
|
769
|
+
});
|
|
770
|
+
(0, stream_1.pipeline)(response.data, parser, chunkCollector, toClaude, toResponses, serializer, res, (error) => {
|
|
771
|
+
if (error) {
|
|
772
|
+
void finalizeLog(500, error.message);
|
|
773
|
+
}
|
|
774
|
+
});
|
|
775
|
+
return;
|
|
776
|
+
}
|
|
777
|
+
// 默认stream处理(无转换)
|
|
778
|
+
const chunkCollector = new chunk_collector_1.ChunkCollectorTransform();
|
|
779
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
780
|
+
this.copyResponseHeaders(responseHeaders, res);
|
|
781
|
+
res.on('finish', () => {
|
|
782
|
+
streamChunksForLog = chunkCollector.getChunks();
|
|
783
|
+
void finalizeLog(res.statusCode);
|
|
784
|
+
});
|
|
785
|
+
(0, stream_1.pipeline)(response.data, chunkCollector, res, (error) => {
|
|
786
|
+
if (error) {
|
|
787
|
+
void finalizeLog(500, error.message);
|
|
788
|
+
}
|
|
789
|
+
});
|
|
790
|
+
return;
|
|
791
|
+
}
|
|
792
|
+
let responseData = response.data;
|
|
793
|
+
if (streamRequested && response.data && typeof response.data.on === 'function' && !isEventStream) {
|
|
794
|
+
const raw = yield this.readStreamBody(response.data);
|
|
795
|
+
responseData = (_a = this.safeJsonParse(raw)) !== null && _a !== void 0 ? _a : raw;
|
|
796
|
+
}
|
|
797
|
+
// 收集响应头
|
|
798
|
+
responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
|
|
799
|
+
if (response.status >= 400) {
|
|
800
|
+
usageForLog = this.extractTokenUsage(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
801
|
+
// 记录错误响应体
|
|
802
|
+
responseBodyForLog = typeof responseData === 'string' ? responseData : JSON.stringify(responseData);
|
|
803
|
+
this.copyResponseHeaders(responseHeaders, res);
|
|
804
|
+
if (contentType.includes('application/json')) {
|
|
805
|
+
res.status(response.status).json(responseData);
|
|
806
|
+
}
|
|
807
|
+
else {
|
|
808
|
+
res.status(response.status).send(responseData);
|
|
809
|
+
}
|
|
810
|
+
yield finalizeLog(res.statusCode);
|
|
811
|
+
return;
|
|
812
|
+
}
|
|
813
|
+
if (targetType === 'claude-code' && this.isOpenAIChatSource(sourceType)) {
|
|
814
|
+
const converted = (0, claude_openai_1.transformOpenAIChatResponseToClaude)(responseData);
|
|
815
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
816
|
+
// 记录转换后的响应体
|
|
817
|
+
responseBodyForLog = JSON.stringify(converted);
|
|
818
|
+
res.status(response.status).json(converted);
|
|
819
|
+
}
|
|
820
|
+
else if (targetType === 'claude-code' && this.isOpenAIResponsesSource(sourceType)) {
|
|
821
|
+
const converted = (0, openai_responses_1.transformOpenAIResponsesToClaude)(responseData);
|
|
822
|
+
usageForLog = (0, openai_responses_1.extractTokenUsageFromOpenAIResponsesUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
823
|
+
responseBodyForLog = JSON.stringify(converted);
|
|
824
|
+
res.status(response.status).json(converted);
|
|
825
|
+
}
|
|
826
|
+
else if (targetType === 'codex' && this.isClaudeSource(sourceType)) {
|
|
827
|
+
const converted = (0, openai_responses_1.transformClaudeResponseToOpenAIResponses)(responseData);
|
|
828
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
829
|
+
responseBodyForLog = JSON.stringify(converted);
|
|
830
|
+
res.status(response.status).json(converted);
|
|
831
|
+
}
|
|
832
|
+
else if (targetType === 'codex' && this.isOpenAIChatSource(sourceType)) {
|
|
833
|
+
const claudeResponse = (0, claude_openai_1.transformOpenAIChatResponseToClaude)(responseData);
|
|
834
|
+
const converted = (0, openai_responses_1.transformClaudeResponseToOpenAIResponses)(claudeResponse);
|
|
835
|
+
usageForLog = (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
836
|
+
responseBodyForLog = JSON.stringify(converted);
|
|
837
|
+
res.status(response.status).json(converted);
|
|
838
|
+
}
|
|
839
|
+
else {
|
|
840
|
+
usageForLog = this.extractTokenUsage(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
|
|
841
|
+
// 记录原始响应体
|
|
842
|
+
responseBodyForLog = typeof responseData === 'string' ? responseData : JSON.stringify(responseData);
|
|
843
|
+
this.copyResponseHeaders(responseHeaders, res);
|
|
844
|
+
if (contentType.includes('application/json')) {
|
|
845
|
+
res.status(response.status).json(responseData);
|
|
846
|
+
}
|
|
847
|
+
else {
|
|
848
|
+
res.status(response.status).send(responseData);
|
|
849
|
+
}
|
|
850
|
+
}
|
|
851
|
+
yield finalizeLog(res.statusCode);
|
|
852
|
+
}
|
|
853
|
+
catch (error) {
|
|
854
|
+
console.error('Proxy error:', error);
|
|
855
|
+
yield finalizeLog(500, error.message);
|
|
856
|
+
res.status(500).json({ error: error.message });
|
|
857
|
+
}
|
|
858
|
+
});
|
|
859
|
+
}
|
|
860
|
+
reloadRoutes() {
|
|
861
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
862
|
+
this.routes = this.dbManager.getRoutes().filter((g) => g.isActive);
|
|
863
|
+
this.rules.clear();
|
|
864
|
+
for (const route of this.routes) {
|
|
865
|
+
const routeRules = this.dbManager.getRules(route.id);
|
|
866
|
+
this.rules.set(route.id, routeRules);
|
|
867
|
+
}
|
|
868
|
+
// Load all services
|
|
869
|
+
const allServices = this.dbManager.getAPIServices();
|
|
870
|
+
this.services.clear();
|
|
871
|
+
allServices.forEach((service) => {
|
|
872
|
+
this.services.set(service.id, service);
|
|
873
|
+
});
|
|
874
|
+
console.log(`Loaded ${this.routes.length} active routes and ${this.services.size} services`);
|
|
875
|
+
});
|
|
876
|
+
}
|
|
877
|
+
updateConfig(config) {
    // Swap in a new configuration object for the proxy server.
    // NOTE(review): this only replaces the reference; it does not reload
    // routes or services — callers presumably invoke reloadRoutes()
    // separately when routing-relevant settings change. Confirm.
    return __awaiter(this, void 0, void 0, function* () {
        this.config = config;
    });
}
|
|
882
|
+
initialize() {
    // One-time startup sequence: install middleware first so it is in
    // place before any route handlers run, then populate the in-memory
    // route/rule/service tables from the database.
    return __awaiter(this, void 0, void 0, function* () {
        this.setupMiddleware();
        yield this.reloadRoutes();
    });
}
|
|
888
|
+
}
|
|
889
|
+
exports.ProxyServer = ProxyServer;
|