aicodeswitch 1.6.2 → 1.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CLAUDE.md CHANGED
@@ -92,14 +92,13 @@ aicos version # Show current version information
  #### 2. Proxy Server - `server/proxy-server.ts`
  - **Route Matching**: Finds active route based on target type (claude-code/codex)
  - **Rule Matching**: Determines content type from request (image-understanding/thinking/long-context/background/default)
- - **Request Transformation**: Converts between different API formats (Claude ↔ OpenAI ↔ OpenAI Responses)
+ - **Request Transformation**: Converts between different API formats (Claude ↔ OpenAI Chat)
  - **Streaming**: Handles SSE (Server-Sent Events) streaming responses with real-time transformation
  - **Logging**: Tracks requests, responses, and errors

  #### 3. Transformers - `server/transformers/`
  - **streaming.ts**: SSE parsing/serialization and event transformation
  - **claude-openai.ts**: Claude ↔ OpenAI Chat format conversion
- - **openai-responses.ts**: OpenAI Responses format conversion
  - **chunk-collector.ts**: Collects streaming chunks for logging

  #### 4. Database - `server/database.ts`
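The Claude ↔ OpenAI Chat conversion mentioned above is the only request mapping this version keeps. The sketch below illustrates the general idea with deliberately simplified types; it is not the package's claude-openai.ts implementation, and the `claudeToOpenAIChat` helper and its type names are illustrative only.

```ts
// Minimal sketch (not package code) of a Claude Messages -> OpenAI Chat request mapping.
// Shapes are simplified; field names follow the public Claude and OpenAI APIs.

interface ClaudeMessage {
  role: 'user' | 'assistant';
  content: string | Array<{ type: 'text'; text: string }>;
}

interface ClaudeRequest {
  model: string;
  system?: string;
  messages: ClaudeMessage[];
  max_tokens: number;
  stream?: boolean;
}

interface OpenAIChatRequest {
  model: string;
  messages: Array<{ role: 'system' | 'user' | 'assistant'; content: string }>;
  max_tokens?: number;
  stream?: boolean;
}

function claudeToOpenAIChat(body: ClaudeRequest, targetModel?: string): OpenAIChatRequest {
  const messages: OpenAIChatRequest['messages'] = [];
  // Claude carries the system prompt as a top-level field; OpenAI Chat expects a system message.
  if (body.system) messages.push({ role: 'system', content: body.system });
  for (const m of body.messages) {
    // Flatten text blocks into a single string for the simplified sketch.
    const text = typeof m.content === 'string'
      ? m.content
      : m.content.map((b) => b.text).join('');
    messages.push({ role: m.role, content: text });
  }
  return {
    // A rule's targetModel overrides the requested model; otherwise pass it through.
    model: targetModel || body.model,
    messages,
    max_tokens: body.max_tokens,
    stream: body.stream,
  };
}

// Example usage with illustrative values.
const example = claudeToOpenAIChat({
  model: 'claude-demo',
  system: 'You are helpful.',
  messages: [{ role: 'user', content: 'Hi' }],
  max_tokens: 256,
});
console.log(JSON.stringify(example, null, 2));
```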
package/README.md CHANGED
@@ -199,6 +199,10 @@ PORT=4567
  * [indb](https://github.com/tangshuang/indb): a lightweight key-value database library for the browser
  * [Formast](https://github.com/tangshuang/formast): an enterprise-grade, JSON-driven form framework for complex business scenarios

+ ## Related Resources
+
+ * [Claude Code In-Depth Tutorial](https://claudecode.tangshuang.net): a 100% free Claude Code tutorial, from beginner to mastery
+
  ## Support Me

  ![](public/donate-to-me.png)
@@ -18,7 +18,6 @@ const stream_1 = require("stream");
  const streaming_1 = require("./transformers/streaming");
  const chunk_collector_1 = require("./transformers/chunk-collector");
  const claude_openai_1 = require("./transformers/claude-openai");
- const openai_responses_1 = require("./transformers/openai-responses");
  const SUPPORTED_TARGETS = ['claude-code', 'codex'];
  class ProxyServer {
  constructor(dbManager, app) {
@@ -34,6 +33,8 @@ class ProxyServer {
  writable: true,
  value: void 0
  });
+ // The fields below keep a cached backup (may be used for performance optimization later)
+ // In practice, all configuration is read from the database in real time
  Object.defineProperty(this, "routes", {
  enumerable: true,
  configurable: true,
@@ -161,7 +162,7 @@ class ProxyServer {
  if (!rule) {
  return res.status(404).json({ error: 'No matching rule found' });
  }
- const service = this.services.get(rule.targetServiceId);
+ const service = this.getServiceById(rule.targetServiceId);
  if (!service) {
  return res.status(500).json({ error: 'Target service not configured' });
  }
@@ -176,7 +177,7 @@ class ProxyServer {
  // Try each rule until one succeeds or all fail
  let lastError = null;
  for (const rule of allRules) {
- const service = this.services.get(rule.targetServiceId);
+ const service = this.getServiceById(rule.targetServiceId);
  if (!service)
  continue;
  // Check the blacklist
@@ -227,10 +228,24 @@ class ProxyServer {
  requestHeaders: this.normalizeHeaders(req.headers),
  requestBody: req.body ? JSON.stringify(req.body) : undefined,
  });
- res.status(503).json({
- error: 'All services failed',
- details: lastError === null || lastError === void 0 ? void 0 : lastError.message
- });
+ // Determine the target type from the path and return an error in the appropriate format
+ const isClaudeCode = req.path.startsWith('/claude-code/');
+ if (isClaudeCode) {
+ const claudeError = {
+ type: 'error',
+ error: {
+ type: 'api_error',
+ message: 'All API services failed. Please try again later.'
+ }
+ };
+ res.status(503).json(claudeError);
+ }
+ else {
+ res.status(503).json({
+ error: 'All services failed',
+ details: lastError === null || lastError === void 0 ? void 0 : lastError.message
+ });
+ }
  }
  catch (error) {
  console.error('Proxy error:', error);
@@ -255,7 +270,21 @@ class ProxyServer {
  requestHeaders: this.normalizeHeaders(req.headers),
  requestBody: req.body ? JSON.stringify(req.body) : undefined,
  });
- res.status(500).json({ error: error.message });
+ // Determine the target type from the path and return an error in the appropriate format
+ const isClaudeCode = req.path.startsWith('/claude-code/');
+ if (isClaudeCode) {
+ const claudeError = {
+ type: 'error',
+ error: {
+ type: 'api_error',
+ message: error.message || 'Internal server error'
+ }
+ };
+ res.status(500).json(claudeError);
+ }
+ else {
+ res.status(500).json({ error: error.message });
+ }
  }
  }));
  }
@@ -283,7 +312,7 @@ class ProxyServer {
  if (!rule) {
  return res.status(404).json({ error: 'No matching rule found' });
  }
- const service = this.services.get(rule.targetServiceId);
+ const service = this.getServiceById(rule.targetServiceId);
  if (!service) {
  return res.status(500).json({ error: 'Target service not configured' });
  }
@@ -298,7 +327,7 @@ class ProxyServer {
  // Try each rule until one succeeds or all fail
  let lastError = null;
  for (const rule of allRules) {
- const service = this.services.get(rule.targetServiceId);
+ const service = this.getServiceById(rule.targetServiceId);
  if (!service)
  continue;
  // Check the blacklist
@@ -349,10 +378,24 @@ class ProxyServer {
  requestHeaders: this.normalizeHeaders(req.headers),
  requestBody: req.body ? JSON.stringify(req.body) : undefined,
  });
- res.status(503).json({
- error: 'All services failed',
- details: lastError === null || lastError === void 0 ? void 0 : lastError.message
- });
+ // Determine the target type from the path and return an error in the appropriate format
+ const isClaudeCode = req.path.startsWith('/claude-code/');
+ if (isClaudeCode) {
+ const claudeError = {
+ type: 'error',
+ error: {
+ type: 'api_error',
+ message: 'All API services failed. Please try again later.'
+ }
+ };
+ res.status(503).json(claudeError);
+ }
+ else {
+ res.status(503).json({
+ error: 'All services failed',
+ details: lastError === null || lastError === void 0 ? void 0 : lastError.message
+ });
+ }
  }
  catch (error) {
  console.error(`Fixed route error for ${targetType}:`, error);
@@ -381,18 +424,45 @@ class ProxyServer {
  }
  });
  }
+ /**
+ * Fetch all active routes from the database in real time
+ * @returns the list of active routes
+ */
+ getActiveRoutes() {
+ return this.dbManager.getRoutes().filter(route => route.isActive);
+ }
+ /**
+ * Fetch the rules of a given route from the database in real time
+ * @param routeId the route ID
+ * @returns the list of rules (sorted by sortOrder in descending order)
+ */
+ getRulesByRouteId(routeId) {
+ const routeRules = this.dbManager.getRules(routeId);
+ return routeRules.sort((a, b) => (b.sortOrder || 0) - (a.sortOrder || 0));
+ }
  findMatchingRoute(_req) {
  // Find active route based on targetType - for now, return the first active route
  // This can be extended later based on specific routing logic
- return this.routes.find(route => route.isActive);
+ const activeRoutes = this.getActiveRoutes();
+ return activeRoutes.find(route => route.isActive);
  }
  findRouteByTargetType(targetType) {
- return this.routes.find(route => route.targetType === targetType && route.isActive);
+ const activeRoutes = this.getActiveRoutes();
+ return activeRoutes.find(route => route.targetType === targetType && route.isActive);
+ }
+ /**
+ * Fetch a service configuration from the database in real time
+ * @param serviceId the service ID
+ * @returns the service configuration, or undefined if it does not exist
+ */
+ getServiceById(serviceId) {
+ const allServices = this.dbManager.getAPIServices();
+ return allServices.find(service => service.id === serviceId);
  }
  findMatchingRule(routeId, req) {
  return __awaiter(this, void 0, void 0, function* () {
- const rules = this.rules.get(routeId);
- if (!rules)
+ const rules = this.getRulesByRouteId(routeId);
+ if (!rules || rules.length === 0)
  return undefined;
  const body = req.body;
  const requestModel = body === null || body === void 0 ? void 0 : body.model;
@@ -432,8 +502,8 @@ class ProxyServer {
  });
  }
  getAllMatchingRules(routeId, req) {
- const rules = this.rules.get(routeId);
- if (!rules)
+ const rules = this.getRulesByRouteId(routeId);
+ if (!rules || rules.length === 0)
  return [];
  const body = req.body;
  const requestModel = body === null || body === void 0 ? void 0 : body.model;
@@ -693,13 +763,10 @@ class ProxyServer {
  return length;
  }
  isClaudeSource(sourceType) {
- return sourceType === 'claude-chat' || sourceType === 'claude-code';
+ return sourceType === 'claude-chat';
  }
  isOpenAIChatSource(sourceType) {
- return sourceType === 'openai-chat' || sourceType === 'openai-code' || sourceType === 'deepseek-chat';
- }
- isOpenAIResponsesSource(sourceType) {
- return sourceType === 'openai-responses';
+ return sourceType === 'openai-chat' || sourceType === 'deepseek-chat';
  }
  applyModelOverride(body, rule) {
  // If targetModel is empty or missing, keep the original model (pass-through)
@@ -803,9 +870,6 @@ class ProxyServer {
  extractTokenUsage(usage) {
  if (!usage)
  return undefined;
- if (typeof usage.input_tokens === 'number' && typeof usage.output_tokens === 'number' && usage.prompt_tokens === undefined) {
- return (0, openai_responses_1.extractTokenUsageFromOpenAIResponsesUsage)(usage);
- }
  if (typeof usage.prompt_tokens === 'number' || typeof usage.completion_tokens === 'number') {
  return (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(usage);
  }
@@ -833,25 +897,17 @@ class ProxyServer {
  // Claude → OpenAI Chat: /v1/messages → /v1/chat/completions
  return originalPath.replace(/\/v1\/messages\b/, '/v1/chat/completions');
  }
- else if (this.isOpenAIResponsesSource(targetSourceType)) {
- // Claude → OpenAI Responses: /v1/messages → /v1/responses/completions
- return originalPath.replace(/\/v1\/messages\b/, '/v1/responses/completions');
- }
  }
  // Requests initiated by Codex
  if (sourceTool === 'codex') {
- // Codex uses the OpenAI Responses API format by default
- if (this.isOpenAIResponsesSource(targetSourceType)) {
- // OpenAI Responses → OpenAI Responses: pass the path through unchanged
+ // Codex uses the OpenAI Chat API format by default
+ if (this.isOpenAIChatSource(targetSourceType)) {
+ // OpenAI Chat → OpenAI Chat: pass the path through unchanged
  return originalPath;
  }
- else if (this.isOpenAIChatSource(targetSourceType)) {
- // OpenAI Responses → OpenAI Chat: /v1/responses/completions → /v1/chat/completions
- return originalPath.replace(/\/v1\/responses\/completions\b/, '/v1/chat/completions');
- }
  else if (this.isClaudeSource(targetSourceType)) {
- // OpenAI Responses → Claude: /v1/responses/completions → /v1/messages
- return originalPath.replace(/\/v1\/responses\/completions\b/, '/v1/messages');
+ // OpenAI Chat → Claude: /v1/chat/completions → /v1/messages
+ return originalPath.replace(/\/v1\/chat\/completions\b/, '/v1/messages');
  }
  }
  // Default: return the original path unchanged
@@ -859,7 +915,7 @@ class ProxyServer {
  }
  proxyRequest(req, res, route, rule, service) {
  return __awaiter(this, void 0, void 0, function* () {
- var _a;
+ var _a, _b;
  res.locals.skipLog = true;
  const startTime = Date.now();
  const sourceType = (service.sourceType || 'openai-chat');
@@ -915,9 +971,6 @@ class ProxyServer {
  else if (this.isOpenAIChatSource(sourceType)) {
  requestBody = (0, claude_openai_1.transformClaudeRequestToOpenAIChat)(requestBody, rule.targetModel);
  }
- else if (this.isOpenAIResponsesSource(sourceType)) {
- requestBody = (0, openai_responses_1.transformClaudeRequestToOpenAIResponses)(requestBody, rule.targetModel);
- }
  else {
  res.status(400).json({ error: 'Unsupported source type for Claude Code.' });
  yield finalizeLog(400, 'Unsupported source type for Claude Code');
@@ -925,17 +978,14 @@ class ProxyServer {
  }
  }
  else if (targetType === 'codex') {
- if (this.isOpenAIResponsesSource(sourceType)) {
+ if (this.isOpenAIChatSource(sourceType)) {
  requestBody = this.applyModelOverride(requestBody, rule);
  }
- else if (this.isOpenAIChatSource(sourceType)) {
- requestBody = (0, openai_responses_1.transformOpenAIResponsesRequestToOpenAIChat)(requestBody, rule.targetModel);
- }
  else if (this.isClaudeSource(sourceType)) {
- requestBody = (0, openai_responses_1.transformOpenAIResponsesRequestToClaude)(requestBody, rule.targetModel);
+ requestBody = (0, claude_openai_1.transformClaudeRequestToOpenAIChat)(requestBody, rule.targetModel);
  }
  else {
- res.status(400).json({ error: 'Codex requires an OpenAI Responses compatible source.' });
+ res.status(400).json({ error: 'Unsupported source type for Codex.' });
  yield finalizeLog(400, 'Unsupported source type for Codex');
  return;
  }
@@ -955,7 +1005,7 @@ class ProxyServer {
  method: req.method,
  url: `${service.apiUrl}${mappedPath}`,
  headers: this.buildUpstreamHeaders(req, service, sourceType, streamRequested),
- timeout: service.timeout || 30000,
+ timeout: service.timeout || 3000000, // default: 300 seconds
  validateStatus: () => true,
  responseType: streamRequested ? 'stream' : 'json',
  };
@@ -1009,50 +1059,19 @@ class ProxyServer {
  });
  return;
  }
- if (targetType === 'claude-code' && this.isOpenAIResponsesSource(sourceType)) {
- res.setHeader('Content-Type', 'text/event-stream');
- res.setHeader('Cache-Control', 'no-cache');
- res.setHeader('Connection', 'keep-alive');
- const parser = new streaming_1.SSEParserTransform();
- const eventCollector = new chunk_collector_1.SSEEventCollectorTransform();
- const converter = new streaming_1.OpenAIResponsesToClaudeEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
- const serializer = new streaming_1.SSESerializerTransform();
- responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
- res.on('finish', () => {
- const usage = converter.getUsage();
- if (usage) {
- usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
- }
- else {
- // Try to extract usage from the event collector
- const extractedUsage = eventCollector.extractUsage();
- if (extractedUsage) {
- usageForLog = this.extractTokenUsage(extractedUsage);
- }
- }
- streamChunksForLog = eventCollector.getChunks();
- void finalizeLog(res.statusCode);
- });
- (0, stream_1.pipeline)(response.data, parser, eventCollector, converter, serializer, res, (error) => {
- if (error) {
- void finalizeLog(500, error.message);
- }
- });
- return;
- }
  if (targetType === 'codex' && this.isClaudeSource(sourceType)) {
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  const parser = new streaming_1.SSEParserTransform();
  const eventCollector = new chunk_collector_1.SSEEventCollectorTransform();
- const converter = new streaming_1.ClaudeToOpenAIResponsesEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
+ const converter = new streaming_1.ClaudeToOpenAIChatEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
  const serializer = new streaming_1.SSESerializerTransform();
  responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
  res.on('finish', () => {
  const usage = converter.getUsage();
  if (usage) {
- usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
+ usageForLog = (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(usage);
  }
  else {
  // Try to extract usage from the event collector
@@ -1071,38 +1090,6 @@ class ProxyServer {
  });
  return;
  }
- if (targetType === 'codex' && this.isOpenAIChatSource(sourceType)) {
- res.setHeader('Content-Type', 'text/event-stream');
- res.setHeader('Cache-Control', 'no-cache');
- res.setHeader('Connection', 'keep-alive');
- const parser = new streaming_1.SSEParserTransform();
- const eventCollector = new chunk_collector_1.SSEEventCollectorTransform();
- const toClaude = new streaming_1.OpenAIToClaudeEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
- const toResponses = new streaming_1.ClaudeToOpenAIResponsesEventTransform({ model: requestBody === null || requestBody === void 0 ? void 0 : requestBody.model });
- const serializer = new streaming_1.SSESerializerTransform();
- responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
- res.on('finish', () => {
- const usage = toResponses.getUsage();
- if (usage) {
- usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(usage);
- }
- else {
- // Try to extract usage from the event collector
- const extractedUsage = eventCollector.extractUsage();
- if (extractedUsage) {
- usageForLog = this.extractTokenUsage(extractedUsage);
- }
- }
- streamChunksForLog = eventCollector.getChunks();
- void finalizeLog(res.statusCode);
- });
- (0, stream_1.pipeline)(response.data, parser, eventCollector, toClaude, toResponses, serializer, res, (error) => {
- if (error) {
- void finalizeLog(500, error.message);
- }
- });
- return;
- }
  // Default stream handling (no transformation)
  const eventCollector = new chunk_collector_1.SSEEventCollectorTransform();
  responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
@@ -1151,25 +1138,12 @@ class ProxyServer {
  responseBodyForLog = JSON.stringify(converted);
  res.status(response.status).json(converted);
  }
- else if (targetType === 'claude-code' && this.isOpenAIResponsesSource(sourceType)) {
- const converted = (0, openai_responses_1.transformOpenAIResponsesToClaude)(responseData);
- usageForLog = (0, openai_responses_1.extractTokenUsageFromOpenAIResponsesUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
- responseBodyForLog = JSON.stringify(converted);
- res.status(response.status).json(converted);
- }
  else if (targetType === 'codex' && this.isClaudeSource(sourceType)) {
- const converted = (0, openai_responses_1.transformClaudeResponseToOpenAIResponses)(responseData);
+ const converted = (0, claude_openai_1.transformClaudeResponseToOpenAIChat)(responseData);
  usageForLog = (0, claude_openai_1.extractTokenUsageFromClaudeUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
  responseBodyForLog = JSON.stringify(converted);
  res.status(response.status).json(converted);
  }
- else if (targetType === 'codex' && this.isOpenAIChatSource(sourceType)) {
- const claudeResponse = (0, claude_openai_1.transformOpenAIChatResponseToClaude)(responseData);
- const converted = (0, openai_responses_1.transformClaudeResponseToOpenAIResponses)(claudeResponse);
- usageForLog = (0, claude_openai_1.extractTokenUsageFromOpenAIUsage)(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
- responseBodyForLog = JSON.stringify(converted);
- res.status(response.status).json(converted);
- }
  else {
  usageForLog = this.extractTokenUsage(responseData === null || responseData === void 0 ? void 0 : responseData.usage);
  // Log the raw response body
@@ -1186,46 +1160,74 @@ class ProxyServer {
  }
  catch (error) {
  console.error('Proxy error:', error);
- yield finalizeLog(500, error.message);
+ // Detect whether this is a timeout error
+ const isTimeout = error.code === 'ECONNABORTED' ||
+ ((_b = error.message) === null || _b === void 0 ? void 0 : _b.toLowerCase().includes('timeout')) ||
+ (error.errno && error.errno === 'ETIMEDOUT');
+ const errorMessage = isTimeout
+ ? 'Request timeout - the upstream API took too long to respond'
+ : (error.message || 'Internal server error');
+ yield finalizeLog(500, errorMessage);
  // Return an error response in the appropriate format based on the request type
  const streamRequested = this.isStreamRequested(req, req.body || {});
- if (streamRequested && route.targetType === 'claude-code') {
- // For Claude Code streaming requests, return an SSE-formatted error response
- res.setHeader('Content-Type', 'text/event-stream');
- res.setHeader('Cache-Control', 'no-cache');
- res.setHeader('Connection', 'keep-alive');
- res.status(500);
- // Send the error event
- const errorEvent = `event: error\ndata: ${JSON.stringify({ error: error.message })}\n\n`;
- const doneEvent = `data: [DONE]\n\n`;
- res.write(errorEvent);
- res.write(doneEvent);
- res.end();
+ if (route.targetType === 'claude-code') {
+ // For Claude Code, return an error response that follows the Claude API standard
+ const claudeError = {
+ type: 'error',
+ error: {
+ type: isTimeout ? 'api_error' : 'api_error',
+ message: errorMessage
+ }
+ };
+ if (streamRequested) {
+ // Streaming request: use the SSE format
+ res.setHeader('Content-Type', 'text/event-stream');
+ res.setHeader('Cache-Control', 'no-cache');
+ res.setHeader('Connection', 'keep-alive');
+ res.status(200);
+ // Send the error event (using the Claude API's standard format)
+ const errorEvent = `event: error\ndata: ${JSON.stringify(claudeError)}\n\n`;
+ res.write(errorEvent);
+ res.end();
+ }
+ else {
+ // Non-streaming request: return JSON
+ res.status(500).json(claudeError);
+ }
  }
  else {
- // For non-streaming requests, return a JSON-formatted error response
- res.status(500).json({ error: error.message });
+ // For Codex, return a JSON-formatted error response
+ res.status(500).json({ error: errorMessage });
  }
  }
  });
  }
  reloadRoutes() {
  return __awaiter(this, void 0, void 0, function* () {
- this.routes = this.dbManager.getRoutes().filter((g) => g.isActive);
- this.rules.clear();
- for (const route of this.routes) {
- const routeRules = this.dbManager.getRules(route.id);
- // Ensure descending sort by sortOrder (the database layer already does this, but double-check)
- const sortedRules = [...routeRules].sort((a, b) => (b.sortOrder || 0) - (a.sortOrder || 0));
- this.rules.set(route.id, sortedRules);
- }
- // Load all services
+ // Note: all configuration (routes, rules, services) is now read from the database in real time on every request
+ // This method is mainly used for initialization and logging
+ // There is no need to call it after changing the database; configuration takes effect automatically
+ const allRoutes = this.dbManager.getRoutes();
+ const activeRoutes = allRoutes.filter((g) => g.isActive);
  const allServices = this.dbManager.getAPIServices();
- this.services.clear();
- allServices.forEach((service) => {
- this.services.set(service.id, service);
- });
- console.log(`Loaded ${this.routes.length} active routes and ${this.services.size} services`);
+ // Keep the cache for possible future performance-optimization needs
+ this.routes = activeRoutes;
+ if (this.rules) {
+ this.rules.clear();
+ for (const route of activeRoutes) {
+ const routeRules = this.dbManager.getRules(route.id);
+ const sortedRules = [...routeRules].sort((a, b) => (b.sortOrder || 0) - (a.sortOrder || 0));
+ this.rules.set(route.id, sortedRules);
+ }
+ }
+ if (this.services) {
+ const services = this.services;
+ services.clear();
+ allServices.forEach((service) => {
+ services.set(service.id, service);
+ });
+ }
+ console.log(`Initialized with ${activeRoutes.length} active routes and ${allServices.length} services (all config read from database in real-time)`);
  });
  }
  updateConfig(config) {
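For claude-code targets, the error paths added above now return Claude Messages API style error objects instead of the previous generic `{ error }` JSON, and frame them as an SSE `error` event when the client requested streaming. Below is a small illustrative sketch of those payload shapes; `buildClaudeError` is a hypothetical helper, not package code.

```ts
// Illustrative sketch of the error shape returned for claude-code targets,
// in both JSON and SSE framing. The type/error.type/error.message fields
// follow the Claude Messages API error format visible in the diff above.

interface ClaudeErrorPayload {
  type: 'error';
  error: { type: 'api_error'; message: string };
}

function buildClaudeError(message: string): ClaudeErrorPayload {
  return { type: 'error', error: { type: 'api_error', message } };
}

// Non-streaming: the payload is sent as a JSON body with a 5xx status.
const jsonBody: string = JSON.stringify(
  buildClaudeError('All API services failed. Please try again later.')
);

// Streaming: the same payload is framed as a single SSE `error` event before closing the stream.
const sseFrame: string = `event: error\ndata: ${jsonBody}\n\n`;

console.log(jsonBody);
console.log(sseFrame);
```

Note that for the streaming case the diff sets the HTTP status to 200 and reports the failure only through the in-stream error event.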
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.extractTokenUsageFromClaudeUsage = exports.extractTokenUsageFromOpenAIUsage = exports.transformOpenAIChatResponseToClaude = exports.transformClaudeRequestToOpenAIChat = exports.mapStopReason = exports.convertOpenAIUsageToClaude = void 0;
+ exports.extractTokenUsageFromClaudeUsage = exports.extractTokenUsageFromOpenAIUsage = exports.transformClaudeResponseToOpenAIChat = exports.transformOpenAIChatResponseToClaude = exports.transformClaudeRequestToOpenAIChat = exports.mapStopReason = exports.convertOpenAIUsageToClaude = void 0;
  const toTextContent = (content) => {
  if (typeof content === 'string')
  return content;
@@ -203,6 +203,51 @@ const transformOpenAIChatResponseToClaude = (body) => {
  };
  };
  exports.transformOpenAIChatResponseToClaude = transformOpenAIChatResponseToClaude;
+ const transformClaudeResponseToOpenAIChat = (body) => {
+ const content = (body === null || body === void 0 ? void 0 : body.content) || [];
+ let textContent = '';
+ const toolCalls = [];
+ for (const block of content) {
+ if ((block === null || block === void 0 ? void 0 : block.type) === 'text') {
+ textContent += block.text || '';
+ }
+ else if ((block === null || block === void 0 ? void 0 : block.type) === 'tool_use') {
+ toolCalls.push({
+ id: block.id,
+ type: 'function',
+ function: {
+ name: block.name || 'tool',
+ arguments: typeof block.input === 'string' ? block.input : JSON.stringify(block.input || {}),
+ },
+ });
+ }
+ }
+ const message = {
+ role: 'assistant',
+ content: textContent,
+ };
+ if (toolCalls.length > 0) {
+ message.tool_calls = toolCalls;
+ }
+ const usage = (body === null || body === void 0 ? void 0 : body.usage) ? {
+ prompt_tokens: body.usage.input_tokens || 0,
+ completion_tokens: body.usage.output_tokens || 0,
+ total_tokens: (body.usage.input_tokens || 0) + (body.usage.output_tokens || 0),
+ } : undefined;
+ return {
+ id: body === null || body === void 0 ? void 0 : body.id,
+ object: 'chat.completion',
+ created: Math.floor(Date.now() / 1000),
+ model: body === null || body === void 0 ? void 0 : body.model,
+ choices: [{
+ index: 0,
+ message,
+ finish_reason: (0, exports.mapStopReason)(body === null || body === void 0 ? void 0 : body.stop_reason),
+ }],
+ usage,
+ };
+ };
+ exports.transformClaudeResponseToOpenAIChat = transformClaudeResponseToOpenAIChat;
  const extractTokenUsageFromOpenAIUsage = (usage) => {
  if (!usage)
  return undefined;
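As a usage sketch of the newly added transformClaudeResponseToOpenAIChat: given a Claude Messages response, it yields an OpenAI Chat Completions-shaped object roughly as below. The sample values are illustrative, the `created` Unix timestamp is omitted, and the `finish_reason` value assumes `mapStopReason` maps `'end_turn'` to `'stop'`.

```ts
// Illustrative input/output pair (not package code) for the transformer added above.

const claudeResponse = {
  id: 'msg_demo',
  model: 'claude-demo',
  stop_reason: 'end_turn',
  content: [
    { type: 'text', text: 'Hello from Claude.' },
    { type: 'tool_use', id: 'toolu_demo', name: 'get_weather', input: { city: 'Paris' } },
  ],
  usage: { input_tokens: 12, output_tokens: 7 },
};

// Expected shape of the converted result (per the transformer above):
const expected = {
  id: 'msg_demo',
  object: 'chat.completion',
  model: 'claude-demo',
  choices: [{
    index: 0,
    message: {
      role: 'assistant',
      content: 'Hello from Claude.',
      tool_calls: [{
        id: 'toolu_demo',
        type: 'function',
        function: { name: 'get_weather', arguments: '{"city":"Paris"}' },
      }],
    },
    finish_reason: 'stop', // assuming mapStopReason maps 'end_turn' to 'stop'
  }],
  usage: { prompt_tokens: 12, completion_tokens: 7, total_tokens: 19 },
};

console.log(claudeResponse, expected);
```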