@adversity/coding-tool-x 3.0.6 → 3.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133):
  1. package/CHANGELOG.md +38 -18
  2. package/README.md +8 -8
  3. package/dist/web/assets/ConfigTemplates-Bidwfdf2.css +1 -0
  4. package/dist/web/assets/ConfigTemplates-ZrK_s7ma.js +1 -0
  5. package/dist/web/assets/Home-B8YfhZ3c.js +1 -0
  6. package/dist/web/assets/Home-Di2qsylF.css +1 -0
  7. package/dist/web/assets/PluginManager-BD7QUZbU.js +1 -0
  8. package/dist/web/assets/PluginManager-ROyoZ-6m.css +1 -0
  9. package/dist/web/assets/ProjectList-C1fQb9OW.css +1 -0
  10. package/dist/web/assets/ProjectList-DRb1DuHV.js +1 -0
  11. package/dist/web/assets/SessionList-BGJWyneI.css +1 -0
  12. package/dist/web/assets/SessionList-lZ0LKzfT.js +1 -0
  13. package/dist/web/assets/SkillManager-C1xG5B4Q.js +1 -0
  14. package/dist/web/assets/SkillManager-D7pd-d_P.css +1 -0
  15. package/dist/web/assets/Terminal-DGNJeVtc.css +1 -0
  16. package/dist/web/assets/Terminal-DksBo_lM.js +1 -0
  17. package/dist/web/assets/WorkspaceManager-Burx7XOo.js +1 -0
  18. package/dist/web/assets/WorkspaceManager-CrwgQgmP.css +1 -0
  19. package/dist/web/assets/icons-kcfLIMBB.js +1 -0
  20. package/dist/web/assets/index-Ufv5rCa5.css +1 -0
  21. package/dist/web/assets/index-lAkrRC3h.js +2 -0
  22. package/dist/web/assets/markdown-BfC0goYb.css +10 -0
  23. package/dist/web/assets/markdown-C9MYpaSi.js +1 -0
  24. package/dist/web/assets/naive-ui-CSrLusZZ.js +1 -0
  25. package/dist/web/assets/{vendors-D2HHw_aW.js → vendors-CO3Upi1d.js} +2 -2
  26. package/dist/web/assets/vue-vendor-DqyWIXEb.js +45 -0
  27. package/dist/web/assets/xterm-6GBZ9nXN.css +32 -0
  28. package/dist/web/assets/xterm-BJzAjXCH.js +13 -0
  29. package/dist/web/index.html +8 -6
  30. package/package.json +4 -2
  31. package/src/commands/channels.js +48 -1
  32. package/src/commands/cli-type.js +4 -2
  33. package/src/commands/daemon.js +92 -13
  34. package/src/commands/doctor.js +10 -9
  35. package/src/commands/list.js +1 -1
  36. package/src/commands/logs.js +6 -4
  37. package/src/commands/port-config.js +24 -4
  38. package/src/commands/proxy-control.js +12 -6
  39. package/src/commands/search.js +1 -1
  40. package/src/commands/security.js +3 -2
  41. package/src/commands/stats.js +226 -52
  42. package/src/commands/switch.js +1 -1
  43. package/src/commands/toggle-proxy.js +31 -6
  44. package/src/commands/ui.js +8 -1
  45. package/src/commands/update.js +97 -0
  46. package/src/commands/workspace.js +1 -1
  47. package/src/config/default.js +39 -2
  48. package/src/config/loader.js +74 -8
  49. package/src/config/paths.js +105 -33
  50. package/src/index.js +67 -4
  51. package/src/plugins/constants.js +3 -2
  52. package/src/plugins/plugin-api.js +1 -1
  53. package/src/reset-config.js +4 -2
  54. package/src/server/api/agents.js +57 -14
  55. package/src/server/api/channels.js +112 -33
  56. package/src/server/api/codex-channels.js +111 -18
  57. package/src/server/api/codex-proxy.js +14 -8
  58. package/src/server/api/commands.js +71 -18
  59. package/src/server/api/config-export.js +0 -6
  60. package/src/server/api/config-registry.js +11 -3
  61. package/src/server/api/config.js +376 -5
  62. package/src/server/api/convert.js +133 -0
  63. package/src/server/api/dashboard.js +22 -6
  64. package/src/server/api/gemini-channels.js +107 -18
  65. package/src/server/api/gemini-proxy.js +14 -8
  66. package/src/server/api/gemini-sessions.js +1 -1
  67. package/src/server/api/health-check.js +4 -3
  68. package/src/server/api/mcp.js +3 -3
  69. package/src/server/api/opencode-channels.js +419 -0
  70. package/src/server/api/opencode-projects.js +99 -0
  71. package/src/server/api/opencode-proxy.js +198 -0
  72. package/src/server/api/opencode-sessions.js +403 -0
  73. package/src/server/api/opencode-statistics.js +57 -0
  74. package/src/server/api/plugins.js +66 -19
  75. package/src/server/api/prompts.js +2 -2
  76. package/src/server/api/proxy.js +7 -4
  77. package/src/server/api/sessions.js +3 -0
  78. package/src/server/api/skills.js +69 -18
  79. package/src/server/api/workspaces.js +78 -6
  80. package/src/server/codex-proxy-server.js +32 -19
  81. package/src/server/dev-server.js +1 -1
  82. package/src/server/gemini-proxy-server.js +17 -3
  83. package/src/server/index.js +164 -48
  84. package/src/server/opencode-proxy-server.js +4375 -0
  85. package/src/server/proxy-server.js +30 -19
  86. package/src/server/services/agents-service.js +61 -24
  87. package/src/server/services/channel-scheduler.js +9 -5
  88. package/src/server/services/channels.js +70 -12
  89. package/src/server/services/codex-channels.js +61 -23
  90. package/src/server/services/codex-settings-manager.js +271 -49
  91. package/src/server/services/codex-statistics-service.js +2 -2
  92. package/src/server/services/commands-service.js +84 -25
  93. package/src/server/services/config-export-service.js +7 -45
  94. package/src/server/services/config-registry-service.js +63 -17
  95. package/src/server/services/config-sync-manager.js +160 -7
  96. package/src/server/services/config-templates-service.js +204 -51
  97. package/src/server/services/env-checker.js +26 -12
  98. package/src/server/services/env-manager.js +126 -18
  99. package/src/server/services/favorites.js +5 -3
  100. package/src/server/services/gemini-channels.js +37 -15
  101. package/src/server/services/gemini-statistics-service.js +2 -2
  102. package/src/server/services/mcp-service.js +350 -9
  103. package/src/server/services/model-detector.js +707 -221
  104. package/src/server/services/network-access.js +80 -0
  105. package/src/server/services/opencode-channels.js +206 -0
  106. package/src/server/services/opencode-gateway-converter.js +639 -0
  107. package/src/server/services/opencode-sessions.js +663 -0
  108. package/src/server/services/opencode-settings-manager.js +342 -0
  109. package/src/server/services/opencode-statistics-service.js +255 -0
  110. package/src/server/services/plugins-service.js +479 -22
  111. package/src/server/services/prompts-service.js +53 -11
  112. package/src/server/services/proxy-runtime.js +1 -1
  113. package/src/server/services/repo-scanner-base.js +1 -1
  114. package/src/server/services/security-config.js +1 -1
  115. package/src/server/services/session-cache.js +1 -1
  116. package/src/server/services/skill-service.js +300 -46
  117. package/src/server/services/speed-test.js +464 -186
  118. package/src/server/services/statistics-service.js +2 -2
  119. package/src/server/services/terminal-commands.js +10 -3
  120. package/src/server/services/terminal-config.js +1 -1
  121. package/src/server/services/ui-config.js +1 -1
  122. package/src/server/services/workspace-service.js +57 -100
  123. package/src/server/websocket-server.js +132 -3
  124. package/src/ui/menu.js +49 -40
  125. package/src/utils/port-helper.js +22 -8
  126. package/src/utils/session.js +5 -4
  127. package/dist/web/assets/icons-BxudHPiX.js +0 -1
  128. package/dist/web/assets/index-D2VfwJBa.js +0 -14
  129. package/dist/web/assets/index-oXBzu0bd.css +0 -41
  130. package/dist/web/assets/naive-ui-DT-Uur8K.js +0 -1
  131. package/dist/web/assets/vue-vendor-6JaYHOiI.js +0 -44
  132. package/src/server/api/permissions.js +0 -385
  133. package/src/server/services/permission-templates-service.js +0 -308
@@ -0,0 +1,4375 @@
1
+ const express = require('express');
2
+ const httpProxy = require('http-proxy');
3
+ const http = require('http');
4
+ const https = require('https');
5
+ const fs = require('fs');
6
+ const os = require('os');
7
+ const path = require('path');
8
+ const zlib = require('zlib');
9
+ const chalk = require('chalk');
10
+ const { broadcastLog, broadcastSchedulerState } = require('./websocket-server');
11
+ const { allocateChannel, releaseChannel, getSchedulerState } = require('./services/channel-scheduler');
12
+ const { recordSuccess, recordFailure } = require('./services/channel-health');
13
+ const { loadConfig } = require('../config/loader');
14
+ const DEFAULT_CONFIG = require('../config/default');
15
+ const { PATHS, ensureStorageDirMigrated } = require('../config/paths');
16
+ const { resolvePricing } = require('./utils/pricing');
17
+ const { recordRequest: recordOpenCodeRequest } = require('./services/opencode-statistics-service');
18
+ const { saveProxyStartTime, clearProxyStartTime, getProxyStartTime, getProxyRuntime } = require('./services/proxy-runtime');
19
+ const { getEnabledChannels, getEffectiveApiKey } = require('./services/opencode-channels');
20
+ const { probeModelAvailability, fetchModelsFromProvider } = require('./services/model-detector');
21
+ const { CLAUDE_MODEL_PRICING } = require('../config/model-pricing');
22
+
23
// ---- Proxy server module state ----

// Active HTTP server, express app, and listening port (all null while stopped).
let proxyServer = null;
let proxyApp = null;
let currentPort = null;

// Per-request metadata, keyed by request id.
const requestMetadata = new Map();

// Remembers model-redirect rules that were already logged, so the same rule
// is not printed twice. Shape: channelId -> { "originalModel": "redirectedModel" }.
const printedRedirectCache = new Map();
33
+
34
// OpenAI model pricing, USD per million tokens.
// Claude models use the centralized table in config/model-pricing.js instead.
const PRICING = {
  'gpt-4o': { input: 2.5, output: 10 },
  'gpt-4o-2024-11-20': { input: 2.5, output: 10 },
  'gpt-4o-mini': { input: 0.15, output: 0.6 },
  'gpt-4-turbo': { input: 10, output: 30 },
  'gpt-4': { input: 30, output: 60 },
  'gpt-3.5-turbo': { input: 0.5, output: 1.5 },
  'o1': { input: 15, output: 60 },
  'o1-mini': { input: 3, output: 12 },
  'o1-pro': { input: 150, output: 600 },
  'o3': { input: 10, output: 40 },
  'o3-mini': { input: 1.1, output: 4.4 },
  'o4-mini': { input: 1.1, output: 4.4 }
};
50
+
51
// Fallback per-million-token pricing for OpenCode traffic; reuses the Codex
// base rate when no OpenCode-specific rate is configured.
const OPENCODE_BASE_PRICING = DEFAULT_CONFIG.pricing.opencode || DEFAULT_CONFIG.pricing.codex;
const ONE_MILLION = 1000000;

// Fixed client-identification strings forwarded upstream so relays treat the
// proxy like the corresponding official CLI.
const CLAUDE_CODE_BETA_HEADER = 'claude-code-20250219,oauth-2025-04-20,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14,prompt-caching-2024-07-31';
const CLAUDE_CODE_USER_AGENT = 'claude-cli/2.1.44 (external, sdk-cli)';
const CODEX_CLI_VERSION = '0.101.0';
const CODEX_CLI_USER_AGENT = 'codex_cli_rs/0.101.0 (Mac OS 26.0.1; arm64) Apple_Terminal/464';
const GEMINI_CLI_USER_AGENT = 'google-api-nodejs-client/9.15.1';
const GEMINI_CLI_API_CLIENT = 'gl-node/22.17.0';
const GEMINI_CLI_CLIENT_METADATA = 'ideType=IDE_UNSPECIFIED,platform=PLATFORM_UNSPECIFIED,pluginType=GEMINI';

// Session-key -> Claude user_id cache: entries expire after one hour of
// disuse and the map is capped at 2000 entries (oldest evicted first).
const CLAUDE_SESSION_USER_ID_TTL_MS = 60 * 60 * 1000;
const CLAUDE_SESSION_USER_ID_CACHE_MAX = 2000;
const claudeSessionUserIdCache = new Map();
63
+
64
/**
 * Detect the tier of a Claude model name.
 * @param {string} modelName - model identifier
 * @returns {string|null} 'opus' | 'sonnet' | 'haiku', or null when no tier matches
 */
function detectModelTier(modelName) {
  if (!modelName) return null;
  const name = modelName.toLowerCase();
  for (const tier of ['opus', 'sonnet', 'haiku']) {
    if (name.includes(tier)) return tier;
  }
  return null;
}
77
+
78
/**
 * Apply a channel's model redirection rules to a requested model name.
 *
 * Preference order:
 *  1. the new `modelRedirects` rule list (first exact `from` match wins);
 *  2. the legacy `modelConfig` tier overrides (opus/sonnet/haiku);
 *  3. the legacy `modelConfig.model` catch-all override.
 *
 * @param {string} originalModel - model name from the incoming request
 * @param {object} channel - channel carrying modelRedirects / modelConfig
 * @returns {string} the redirected model name, or the original when no rule applies
 */
function redirectModel(originalModel, channel) {
  if (!originalModel) return originalModel;

  // New-style rule list takes precedence.
  const rules = channel?.modelRedirects;
  if (Array.isArray(rules) && rules.length > 0) {
    const hit = rules.find((rule) => rule.from && rule.to && rule.from === originalModel);
    if (hit) return hit.to;
  }

  // Backward compatibility: legacy modelConfig overrides.
  const legacy = channel?.modelConfig;
  if (!legacy) return originalModel;

  const tier = detectModelTier(originalModel);
  const tierOverride =
    (tier === 'opus' && legacy.opusModel) ||
    (tier === 'sonnet' && legacy.sonnetModel) ||
    (tier === 'haiku' && legacy.haikuModel);
  if (tierOverride) return tierOverride;

  // Fall back to the generic model override, if any.
  return legacy.model || originalModel;
}
121
+
122
/**
 * Resolve the http-proxy target for an OpenCode request.
 *
 * OpenCode CLI sends paths like /v1/responses or /v1/chat/completions; the
 * channel's base_url may or may not already end in /v1 (e.g.
 * https://api.openai.com/v1 or https://example.com/openai/v1). http-proxy
 * appends req.url to the returned target, so when both the base URL ends in
 * /v1 AND the request path starts with the /v1 segment, the base's /v1 is
 * stripped to avoid producing /v1/v1.
 *
 * Fix over the previous version: the request path is only treated as
 * /v1-prefixed when its first SEGMENT is exactly "v1". A bare
 * `startsWith('/v1')` also matched paths like /v1beta/models, which would
 * have wrongly stripped /v1 from the base URL.
 *
 * @param {string} baseUrl - channel base URL, possibly with trailing slash
 * @param {string} requestPath - incoming request path (req.url)
 * @returns {string} target URL to hand to http-proxy
 */
function resolveOpenCodeTarget(baseUrl = '', requestPath = '') {
  let target = baseUrl || '';

  // Drop a trailing slash so segment checks below are reliable.
  if (target.endsWith('/')) {
    target = target.slice(0, -1);
  }

  // Only strip the base's /v1 when the request path's first segment is
  // exactly "v1" (http-proxy will re-append requestPath to the target).
  const pathHasV1Segment = requestPath === '/v1' || requestPath.startsWith('/v1/');
  if (pathHasV1Segment && target.endsWith('/v1')) {
    target = target.slice(0, -3);
  }

  return target;
}
157
+
158
/**
 * Estimate the USD cost of a request.
 *
 * Claude models use the centralized CLAUDE_MODEL_PRICING table: exact match
 * first, then a fuzzy substring match (either containment direction), then
 * the default Sonnet rate. Other models use the local PRICING table with
 * substring fallbacks (most specific pattern first). Any rate still missing
 * falls back to the OpenCode base pricing via resolvePricing().
 *
 * Robustness fix: tolerates a missing/undefined `model` or `tokens` instead
 * of throwing (`model.startsWith` / `tokens.input` on undefined).
 *
 * @param {string} [model] - model name from the request/response
 * @param {{input?: number, output?: number}} [tokens] - token counts
 * @returns {number} estimated cost in USD
 */
function calculateCost(model = '', tokens = {}) {
  const modelName = String(model || '');
  const modelLower = modelName.toLowerCase();
  let pricing;

  if (modelName.startsWith('claude-') || modelLower.includes('claude')) {
    // Exact match against the centralized Claude pricing table.
    pricing = CLAUDE_MODEL_PRICING[modelName];

    // Fuzzy match: accept containment in either direction.
    if (!pricing) {
      for (const [key, value] of Object.entries(CLAUDE_MODEL_PRICING)) {
        const keyLower = key.toLowerCase();
        if (keyLower.includes(modelLower) || modelLower.includes(keyLower)) {
          pricing = value;
          break;
        }
      }
    }

    // Last resort: price unknown Claude models as current Sonnet.
    if (!pricing) {
      pricing = CLAUDE_MODEL_PRICING['claude-sonnet-4-5-20250929'];
    }
  } else {
    // Non-Claude models: local PRICING table (OpenAI etc.).
    pricing = PRICING[modelName];

    if (!pricing) {
      // Substring fallbacks, most specific first (gpt-4o-mini before gpt-4o,
      // o1-mini before o1, ...). [pattern to look for, PRICING key to use]
      const fallbacks = [
        ['gpt-4o-mini', 'gpt-4o-mini'],
        ['gpt-4o', 'gpt-4o'],
        ['gpt-4', 'gpt-4'],
        ['gpt-3.5', 'gpt-3.5-turbo'],
        ['o1-mini', 'o1-mini'],
        ['o1-pro', 'o1-pro'],
        ['o1', 'o1'],
        ['o3-mini', 'o3-mini'],
        ['o3', 'o3'],
        ['o4-mini', 'o4-mini']
      ];
      for (const [pattern, key] of fallbacks) {
        if (modelLower.includes(pattern)) {
          pricing = PRICING[key];
          break;
        }
      }
    }
  }

  // Fill any missing rates from the OpenCode base pricing.
  pricing = resolvePricing('opencode', pricing, OPENCODE_BASE_PRICING);
  const inputRate = typeof pricing.input === 'number' ? pricing.input : OPENCODE_BASE_PRICING.input;
  const outputRate = typeof pricing.output === 'number' ? pricing.output : OPENCODE_BASE_PRICING.output;

  return (
    (tokens?.input || 0) * inputRate / ONE_MILLION +
    (tokens?.output || 0) * outputRate / ONE_MILLION
  );
}
225
+
226
// Shared JSON body parser for the proxy routes: 100 MB cap, and it stashes a
// copy of the raw request bytes on req.rawBody so handlers can re-serialize
// or forward the original payload later.
const jsonBodyParser = express.json({
  limit: '100mb',
  verify(request, _response, rawBuffer) {
    request.rawBody = Buffer.from(rawBuffer);
  }
});
232
+
233
/** True for POST requests whose Content-Type declares a JSON payload. */
function shouldParseJson(req) {
  if (req.method !== 'POST') return false;
  const contentType = req.headers['content-type'] || '';
  return contentType.includes('application/json');
}
237
+
238
/**
 * Coerce a header/body value (possibly an array of values) into a trimmed
 * string; anything non-string yields ''.
 */
function normalizeSessionKeyValue(value) {
  const raw = Array.isArray(value) ? value[0] : value;
  if (typeof raw !== 'string') return '';
  return raw.trim();
}
242
+
243
/**
 * Pull a session/conversation/workspace identifier out of a request body,
 * checking well-known key spellings in priority order. Returns '' when the
 * body is missing or carries none of them.
 */
function extractSessionIdFromBody(body = {}) {
  if (!body || typeof body !== 'object') return '';

  const candidate =
    body.session_id ||
    body.sessionId ||
    body.conversation_id ||
    body.conversationId ||
    body.metadata?.session_id ||
    body.metadata?.sessionId ||
    body.metadata?.conversation_id ||
    body.metadata?.conversationId ||
    body.workspace?.workspace_id ||
    body.project_id ||
    body.projectId ||
    '';

  return normalizeSessionKeyValue(candidate);
}
261
+
262
/**
 * Resolve a session identifier for a request: headers win (checked in
 * priority order below), then the parsed body is consulted as a fallback.
 */
function extractSessionIdFromRequest(req, body = {}) {
  if (!req || typeof req !== 'object') {
    return extractSessionIdFromBody(body);
  }

  const headers = req.headers || {};
  const fromHeaders = normalizeSessionKeyValue(
    headers['x-session-id'] ||
    headers['x-claude-session'] ||
    headers['x-cc-session'] ||
    headers['x-conversation-id'] ||
    headers['x-session']
  );

  return fromHeaders || extractSessionIdFromBody(body);
}
277
+
278
/**
 * Evict cache entries whose lastUsedAt is missing, malformed, or older than
 * CLAUDE_SESSION_USER_ID_TTL_MS relative to `now`.
 */
function cleanupExpiredClaudeSessionUserIds(now = Date.now()) {
  for (const [sessionKey, entry] of claudeSessionUserIdCache.entries()) {
    const isRecord = entry && typeof entry === 'object';
    const lastUsedAt = isRecord ? Number(entry.lastUsedAt) : NaN;
    // Non-record entries and stale/invalid timestamps are both dropped.
    if (!Number.isFinite(lastUsedAt) || now - lastUsedAt > CLAUDE_SESSION_USER_ID_TTL_MS) {
      claudeSessionUserIdCache.delete(sessionKey);
    }
  }
}
290
+
291
/** Evict the oldest (insertion-order) entries until the cache is within its cap. */
function trimClaudeSessionUserIdCache() {
  while (claudeSessionUserIdCache.size > CLAUDE_SESSION_USER_ID_CACHE_MAX) {
    const { value: oldestKey } = claudeSessionUserIdCache.keys().next();
    if (!oldestKey) break;
    claudeSessionUserIdCache.delete(oldestKey);
  }
}
298
+
299
/**
 * Return a stable Claude user_id for a session key.
 *
 * Uses an LRU-ish cache: Map insertion order is the eviction order, so a
 * cache hit is deleted and re-inserted to mark it as most recently used.
 * Without a session key, the preferred id (or a fresh one) is returned
 * without caching.
 */
function resolveClaudeUserIdBySession(sessionKey, preferredUserId = '') {
  const normalizedKey = normalizeSessionKeyValue(sessionKey);
  const providedUserId = normalizeSessionKeyValue(preferredUserId);
  const now = Date.now();

  cleanupExpiredClaudeSessionUserIds(now);

  if (!normalizedKey) {
    return providedUserId || buildClaudeCodeUserId();
  }

  const cached = claudeSessionUserIdCache.get(normalizedKey);
  const cachedUserId = (cached && typeof cached.userId === 'string') ? cached.userId.trim() : '';
  if (cachedUserId) {
    // Refresh recency: delete + set moves the entry to the back of the
    // eviction queue and stamps a new lastUsedAt.
    claudeSessionUserIdCache.delete(normalizedKey);
    claudeSessionUserIdCache.set(normalizedKey, { userId: cachedUserId, lastUsedAt: now });
    return cachedUserId;
  }

  const userId = providedUserId || buildClaudeCodeUserId();
  claudeSessionUserIdCache.set(normalizedKey, { userId, lastUsedAt: now });
  trimClaudeSessionUserIdCache();
  return userId;
}
329
+
330
/** Normalize a channel's gateway source type to 'claude' | 'gemini' | 'codex' (default). */
function normalizeGatewaySourceType(channel) {
  const raw = String(channel?.gatewaySourceType || '').trim().toLowerCase();
  return (raw === 'claude' || raw === 'gemini') ? raw : 'codex';
}
336
+
337
/** Map process.platform onto the X-Stainless-OS header vocabulary. */
function mapStainlessOs() {
  const names = { darwin: 'MacOS', win32: 'Windows', linux: 'Linux' };
  return names[process.platform] || `other::${process.platform}`;
}
349
+
350
/** Map process.arch onto the X-Stainless-Arch header vocabulary. */
function mapStainlessArch() {
  const names = { x64: 'x64', arm64: 'arm64', ia32: 'x86' };
  return names[process.arch] || `other::${process.arch}`;
}
362
+
363
/**
 * Extract the pathname from a request URL path. Falls back to stripping the
 * query string manually when URL parsing fails; always returns at least '/'.
 */
function getRequestPathname(urlPath = '') {
  try {
    return new URL(urlPath, 'http://localhost').pathname || '/';
  } catch {
    const bare = String(urlPath || '').split('?')[0];
    return bare || '/';
  }
}
371
+
372
/** True when the pathname targets the OpenAI Responses API. */
function isResponsesPath(pathname) {
  return ['/v1/responses', '/responses'].includes(pathname);
}
375
+
376
/** True when the pathname targets the OpenAI Chat Completions API. */
function isChatCompletionsPath(pathname) {
  return ['/v1/chat/completions', '/chat/completions'].includes(pathname);
}
379
+
380
/**
 * Collect the model names worth probing for a channel, in priority order:
 * channel.model, speedTestModel, the legacy modelConfig overrides, then both
 * sides of every modelRedirects rule. Results are trimmed and de-duplicated
 * case-insensitively while preserving the first-seen spelling.
 */
function collectPreferredProbeModels(channel) {
  if (!channel || typeof channel !== 'object') return [];

  const raw = [channel.model, channel.speedTestModel];

  const cfg = channel.modelConfig;
  if (cfg && typeof cfg === 'object') {
    raw.push(cfg.model, cfg.opusModel, cfg.sonnetModel, cfg.haikuModel);
  }

  if (Array.isArray(channel.modelRedirects)) {
    for (const rule of channel.modelRedirects) {
      raw.push(rule?.from, rule?.to);
    }
  }

  const seen = new Set();
  const models = [];
  for (const entry of raw) {
    if (typeof entry !== 'string') continue;
    const trimmed = entry.trim();
    if (!trimmed) continue;
    const key = trimmed.toLowerCase();
    if (seen.has(key)) continue;
    seen.add(key);
    models.push(trimmed);
  }
  return models;
}
415
+
416
/** True when the channel uses one of the gateway-converter entry presets. */
function isConverterPresetChannel(channel) {
  const presetId = String(channel?.presetId || '').trim().toLowerCase();
  return ['entry_claude', 'entry_codex', 'entry_gemini'].includes(presetId);
}
420
+
421
/**
 * Recursively collect human-readable text from message content into
 * `fragments` (mutated in place). Strings are kept when non-blank, numbers
 * and booleans are stringified, arrays are walked element by element, and
 * objects are inspected in priority order: text > input_text > output_text >
 * content > parts.
 */
function extractTextFragments(value, fragments) {
  if (value === null || value === undefined) return;

  if (typeof value === 'string') {
    if (value.trim()) fragments.push(value);
    return;
  }
  if (typeof value === 'number' || typeof value === 'boolean') {
    fragments.push(String(value));
    return;
  }
  if (Array.isArray(value)) {
    for (const entry of value) extractTextFragments(entry, fragments);
    return;
  }
  if (typeof value !== 'object') return;

  // Single-string payload shapes, highest priority first.
  for (const key of ['text', 'input_text', 'output_text']) {
    if (typeof value[key] === 'string') {
      extractTextFragments(value[key], fragments);
      return;
    }
  }
  if (value.content !== undefined) {
    extractTextFragments(value.content, fragments);
    return;
  }
  if (Array.isArray(value.parts)) {
    extractTextFragments(value.parts, fragments);
  }
}
457
+
458
/** Flatten any message content into a single newline-joined, trimmed string. */
function extractText(value) {
  const collected = [];
  extractTextFragments(value, collected);
  return collected.join('\n').trim();
}
463
+
464
/** Normalize an OpenAI/Gemini role string to 'assistant' | 'system' | 'user' (default). */
function normalizeOpenAiRole(role) {
  switch (String(role || '').trim().toLowerCase()) {
    case 'assistant':
    case 'model':
      return 'assistant';
    case 'system':
      return 'system';
    default:
      return 'user';
  }
}
470
+
471
/**
 * Convert OpenAI-style tool definitions to Claude tool definitions.
 * Accepts both the nested shape ({type:'function', function:{...}}) and the
 * flat shape ({type:'function', name, parameters}); unnamed tools are dropped.
 */
function normalizeOpenAiToolsToClaude(tools = []) {
  if (!Array.isArray(tools)) return [];

  const normalized = [];
  for (const tool of tools) {
    if (!tool || typeof tool !== 'object' || tool.type !== 'function') continue;

    // Prefer the nested `function` object when present; otherwise read the
    // fields off the tool itself.
    const source = (tool.function && typeof tool.function === 'object') ? tool.function : tool;
    if (!source.name) continue;

    normalized.push({
      name: source.name,
      description: source.description || '',
      input_schema: source.parameters || { type: 'object', properties: {} }
    });
  }

  return normalized;
}
500
+
501
/**
 * Translate an OpenAI tool_choice value into Claude's tool_choice shape.
 * Returns undefined for anything that has no Claude equivalent.
 */
function normalizeToolChoiceToClaude(toolChoice) {
  if (!toolChoice) return undefined;

  if (typeof toolChoice === 'string') {
    if (toolChoice === 'auto') return { type: 'auto' };
    return toolChoice === 'required' ? { type: 'any' } : undefined;
  }

  if (typeof toolChoice !== 'object') return undefined;

  if (toolChoice.type === 'function') {
    // Name may live on the nested function object or on the choice itself.
    const name = toolChoice.function?.name || toolChoice.name;
    return name ? { type: 'tool', name } : undefined;
  }
  if (toolChoice.type === 'auto') return { type: 'auto' };
  if (toolChoice.type === 'required') return { type: 'any' };

  return undefined;
}
523
+
524
/** Create a pseudo-unique tool-call id: timestamp plus a random base-36 suffix. */
function generateToolCallId() {
  const suffix = Math.random().toString(36).slice(2, 10);
  return `toolu_${Date.now()}_${suffix}`;
}
527
+
528
/**
 * Coerce tool-call arguments to a plain object: objects pass through as-is,
 * JSON strings are parsed (only plain-object results are accepted), and
 * everything else — including parse failures and arrays — yields {}.
 */
function parseToolArguments(value) {
  if (value && typeof value === 'object' && !Array.isArray(value)) {
    return value;
  }

  if (typeof value !== 'string') return {};
  const trimmed = value.trim();
  if (!trimmed) return {};

  try {
    const parsed = JSON.parse(trimmed);
    const isPlainObject = parsed && typeof parsed === 'object' && !Array.isArray(parsed);
    return isPlainObject ? parsed : {};
  } catch {
    return {};
  }
}
546
+
547
/**
 * Normalize a tool result to a string for a Claude tool_result block:
 * strings pass through, null/undefined become '', primitives are stringified,
 * and everything else is JSON-serialized (String() as a last resort).
 */
function normalizeToolResultContent(value) {
  if (value === null || value === undefined) return '';
  switch (typeof value) {
    case 'string':
      return value;
    case 'number':
    case 'boolean':
      return String(value);
    default:
      try {
        return JSON.stringify(value);
      } catch {
        return String(value);
      }
  }
}
557
+
558
/**
 * Turn a Responses-API `function_call` item into a Claude assistant message
 * with a single tool_use block. Returns null when no tool name is available.
 */
function buildAssistantToolUseMessageFromFunctionCall(item) {
  const fn = (item?.function && typeof item.function === 'object') ? item.function : item;
  const name = fn?.name || item?.name;
  if (!name) return null;

  // Call id and arguments may live on either the nested function payload or
  // the item itself, under several spellings.
  const callId = fn?.call_id || item?.call_id || fn?.id || item?.id || generateToolCallId();
  const argsSource = fn?.arguments ?? item?.arguments ?? fn?.input ?? item?.input;

  return {
    role: 'assistant',
    content: [
      {
        type: 'tool_use',
        id: callId,
        name,
        input: parseToolArguments(argsSource)
      }
    ]
  };
}
581
+
582
/**
 * Turn a tool output item (Responses `function_call_output` item or a Chat
 * Completions `tool` message) into a Claude user message carrying a single
 * tool_result block.
 */
function buildUserToolResultMessage(item) {
  const callId = item?.call_id || item?.tool_call_id || item?.id || generateToolCallId();
  const output = normalizeToolResultContent(item?.output ?? item?.content ?? '');

  return {
    role: 'user',
    content: [
      {
        type: 'tool_result',
        tool_use_id: callId,
        content: output
      }
    ]
  };
}
598
+
599
/**
 * Convert an OpenCode (OpenAI-style) request payload into Claude-style
 * system text plus a message list.
 *
 * Handles both API shapes:
 *  - Responses API (`instructions` + `input`, string or item array), mapping
 *    function_call / function_call_output items to tool_use / tool_result;
 *  - Chat Completions (`messages`), mapping `tool` messages and assistant
 *    `tool_calls` likewise.
 *
 * System text (instructions + any system-role messages) is collected
 * separately. Always yields at least one user message ('Hello') so a Claude
 * endpoint never receives an empty list.
 *
 * @returns {{system: string, messages: Array}}
 */
function normalizeOpenCodeMessages(pathname, payload = {}) {
  const systemParts = [];
  const messages = [];
  const forResponses = isResponsesPath(pathname);

  if (forResponses && typeof payload.instructions === 'string' && payload.instructions.trim()) {
    systemParts.push(payload.instructions.trim());
  }

  // Append a plain-text message, routing system text into systemParts and
  // dropping anything that flattens to empty text.
  const appendMessage = (role, content) => {
    const normalizedRole = normalizeOpenAiRole(role);
    const text = extractText(content);
    if (!text) return;
    if (normalizedRole === 'system') {
      systemParts.push(text);
      return;
    }
    messages.push({
      role: normalizedRole === 'assistant' ? 'assistant' : 'user',
      content: [{ type: 'text', text }]
    });
  };

  if (forResponses) {
    if (typeof payload.input === 'string') {
      appendMessage('user', payload.input);
    } else if (Array.isArray(payload.input)) {
      for (const item of payload.input) {
        if (!item || typeof item !== 'object') continue;
        if (item.type === 'function_call') {
          const assistantToolUse = buildAssistantToolUseMessageFromFunctionCall(item);
          if (assistantToolUse) messages.push(assistantToolUse);
        } else if (item.type === 'function_call_output') {
          messages.push(buildUserToolResultMessage(item));
        } else if (item.type === 'message' || item.role) {
          appendMessage(item.role, item.content);
        }
      }
    }
  }

  if (isChatCompletionsPath(pathname) && Array.isArray(payload.messages)) {
    for (const message of payload.messages) {
      if (!message || typeof message !== 'object') continue;

      if (message.role === 'tool') {
        messages.push(buildUserToolResultMessage(message));
        continue;
      }

      const toolCalls = (message.role === 'assistant' && Array.isArray(message.tool_calls))
        ? message.tool_calls
        : [];
      if (toolCalls.length > 0) {
        // Assistant turn with tool calls: optional leading text block, then
        // one tool_use block per named call.
        const assistantContent = [];
        const text = extractText(message.content);
        if (text) assistantContent.push({ type: 'text', text });

        for (const toolCall of toolCalls) {
          if (!toolCall || typeof toolCall !== 'object') continue;
          const fn = (toolCall.function && typeof toolCall.function === 'object')
            ? toolCall.function
            : toolCall;
          const name = fn.name || toolCall.name;
          if (!name) continue;
          assistantContent.push({
            type: 'tool_use',
            id: toolCall.id || fn.call_id || generateToolCallId(),
            name,
            input: parseToolArguments(fn.arguments ?? fn.input)
          });
        }

        if (assistantContent.length > 0) {
          messages.push({ role: 'assistant', content: assistantContent });
        }
        continue;
      }

      appendMessage(message.role, message.content);
    }
  }

  if (messages.length === 0) {
    messages.push({
      role: 'user',
      content: [{ type: 'text', text: 'Hello' }]
    });
  }

  return {
    system: systemParts.join('\n\n').trim(),
    messages
  };
}
698
+
699
/**
 * Fabricate a Claude Code-style user id: a fixed zeroed account hash plus a
 * random session suffix. Some relays validate this exact shape.
 */
function buildClaudeCodeUserId() {
  const sessionId = Math.random().toString(36).substring(2, 15);
  return `user_0000000000000000000000000000000000000000000000000000000000000000_account__session_${sessionId}`;
}
703
+
704
/**
 * Ensure Claude request metadata is a plain object carrying a non-empty
 * user_id (some relays key Claude Code detection off this field). An existing
 * user_id wins, then the fallback, then a freshly generated id.
 */
function normalizeClaudeMetadata(metadata, fallbackUserId = '') {
  const isPlainObject = metadata && typeof metadata === 'object' && !Array.isArray(metadata);
  const normalized = isPlainObject ? { ...metadata } : {};
  const existing = typeof normalized.user_id === 'string' ? normalized.user_id.trim() : '';
  normalized.user_id = existing || normalizeSessionKeyValue(fallbackUserId) || buildClaudeCodeUserId();
  return normalized;
}
712
+
713
/**
 * Translate an OpenCode (OpenAI-style) payload into a Claude Messages API
 * request body. Streaming is always disabled here; the caller deals with
 * streaming separately. options.sessionUserId seeds metadata.user_id.
 */
function convertOpenCodePayloadToClaude(pathname, payload = {}, fallbackModel = '', options = {}) {
  const { system, messages } = normalizeOpenCodeMessages(pathname, payload);

  const requestedMax = Number(payload.max_output_tokens ?? payload.max_tokens);
  const maxTokens = (Number.isFinite(requestedMax) && requestedMax > 0)
    ? Math.round(requestedMax)
    : 4096;

  const converted = {
    model: payload.model || fallbackModel || 'claude-sonnet-4-20250514',
    max_tokens: maxTokens,
    stream: false,
    messages
  };

  if (system) {
    // Some relays only accept Claude `system` as an array of blocks, not a
    // bare string.
    converted.system = [
      {
        type: 'text',
        text: system
      }
    ];
  }

  const tools = normalizeOpenAiToolsToClaude(payload.tools || []);
  if (tools.length > 0) {
    converted.tools = tools;
  }

  const toolChoice = normalizeToolChoiceToClaude(payload.tool_choice);
  if (toolChoice) {
    converted.tool_choice = toolChoice;
  }

  // Copy sampling knobs only when they are real numbers.
  for (const key of ['temperature', 'top_p', 'top_k']) {
    if (Number.isFinite(Number(payload[key]))) {
      converted[key] = Number(payload[key]);
    }
  }

  // Some Claude relays validate metadata.user_id to identify Claude Code.
  converted.metadata = normalizeClaudeMetadata(payload.metadata, options.sessionUserId);

  return converted;
}
759
+
760
/**
 * Converts OpenAI-style tool definitions into Gemini's
 * `[{ functionDeclarations }]` shape. Supports both the nested
 * `{ type: 'function', function: {...} }` form and the flattened
 * `{ type: 'function', name, ... }` form.
 * @param {Array} tools - OpenAI-style tools array.
 * @returns {Array} Gemini tools array, or [] when nothing converts.
 */
function normalizeOpenAiToolsToGemini(tools = []) {
  if (!Array.isArray(tools)) return [];

  const declarations = [];
  const addDeclaration = (spec) => {
    if (!spec.name) return;
    declarations.push({
      name: spec.name,
      description: spec.description || '',
      parameters: spec.parameters || { type: 'object', properties: {} }
    });
  };

  for (const tool of tools) {
    if (!tool || typeof tool !== 'object' || tool.type !== 'function') continue;
    if (tool.function && typeof tool.function === 'object') {
      addDeclaration(tool.function);
    } else {
      addDeclaration(tool);
    }
  }

  return declarations.length > 0 ? [{ functionDeclarations: declarations }] : [];
}
790
+
791
/**
 * Converts an OpenAI-style `tool_choice` into Gemini's
 * `toolConfig.functionCallingConfig` shape.
 * @param {string|object|undefined} toolChoice - OpenAI-style tool choice.
 * @returns {object|undefined} Gemini tool config, or undefined when not mappable.
 */
function normalizeToolChoiceToGemini(toolChoice) {
  if (!toolChoice) return undefined;

  // Map (not plain object) so arbitrary strings like 'constructor' never
  // match via the prototype chain.
  const MODE_BY_KEYWORD = new Map([
    ['auto', 'AUTO'],
    ['required', 'ANY'],
    ['none', 'NONE']
  ]);

  if (typeof toolChoice === 'string') {
    const mode = MODE_BY_KEYWORD.get(toolChoice);
    return mode ? { functionCallingConfig: { mode } } : undefined;
  }

  if (typeof toolChoice === 'object') {
    const functionName = toolChoice.function?.name || toolChoice.name;
    if (toolChoice.type === 'function' && functionName) {
      // Forced single-function call.
      return {
        functionCallingConfig: {
          mode: 'ANY',
          allowedFunctionNames: [functionName]
        }
      };
    }
    const mode = MODE_BY_KEYWORD.get(toolChoice.type);
    if (mode) {
      return { functionCallingConfig: { mode } };
    }
  }

  return undefined;
}
830
+
831
/**
 * Normalizes an OpenAI-style `stop` value into an array of stop sequences.
 * A single non-blank string becomes a one-element array (untrimmed); an array
 * keeps only its non-blank string entries, trimmed. Anything else yields
 * undefined.
 * @param {string|Array|undefined} stopValue
 * @returns {string[]|undefined}
 */
function normalizeStopSequences(stopValue) {
  if (typeof stopValue === 'string') {
    return stopValue.trim() ? [stopValue] : undefined;
  }
  if (Array.isArray(stopValue)) {
    const cleaned = [];
    for (const entry of stopValue) {
      if (typeof entry !== 'string') continue;
      const trimmed = entry.trim();
      if (trimmed) cleaned.push(trimmed);
    }
    return cleaned.length > 0 ? cleaned : undefined;
  }
  return undefined;
}
845
+
846
/**
 * Coerces a tool-result value into the plain-object payload Gemini expects
 * for `functionResponse.response`. Plain objects pass through; JSON-object
 * strings are decoded; everything else is wrapped as `{ content: ... }`.
 * @param {*} value - Raw tool result content.
 * @returns {object} Plain object payload.
 */
function normalizeGeminiFunctionResponsePayload(value) {
  const isPlainObject = (candidate) =>
    candidate && typeof candidate === 'object' && !Array.isArray(candidate);

  if (isPlainObject(value)) {
    return value;
  }

  if (typeof value === 'string') {
    const trimmed = value.trim();
    if (!trimmed) return { content: '' };
    try {
      const decoded = JSON.parse(trimmed);
      if (isPlainObject(decoded)) {
        return decoded;
      }
    } catch {
      // Not valid JSON — fall through and wrap the raw string.
    }
    return { content: value };
  }

  return { content: normalizeToolResultContent(value) };
}
865
+
866
/**
 * Converts normalized Claude-style messages into Gemini `contents` entries.
 * Text blocks become `{ text }` parts, tool_use blocks become functionCall
 * parts, and tool_result blocks become functionResponse parts (matched back
 * to their tool name via the tool_use id).
 * @param {Array} messages - Normalized messages ({ role, content } objects).
 * @returns {Array} Gemini `contents` array ({ role, parts } objects).
 */
function buildGeminiContents(messages = []) {
  const contents = [];
  // Remembers tool_use id -> tool name so a later tool_result can be emitted
  // as a functionResponse carrying the function's name.
  const toolNameById = new Map();

  for (const message of messages) {
    if (!message || typeof message !== 'object') continue;
    // Gemini roles are only 'user' and 'model'; every non-assistant role maps to 'user'.
    const role = message.role === 'assistant' ? 'model' : 'user';
    const contentBlocks = Array.isArray(message.content) ? message.content : [message.content];
    const parts = [];

    for (const block of contentBlocks) {
      if (!block || typeof block !== 'object') {
        // Primitive content (e.g. a bare string) is reduced to text.
        const text = extractText(block);
        if (text) {
          parts.push({ text });
        }
        continue;
      }

      if (block.type === 'tool_use' && block.name) {
        const callId = String(block.id || generateToolCallId());
        const args = (block.input && typeof block.input === 'object' && !Array.isArray(block.input))
          ? block.input
          : {};
        toolNameById.set(callId, block.name);
        parts.push({
          functionCall: {
            name: block.name,
            args
          }
        });
        continue;
      }

      if (block.type === 'tool_result') {
        const toolUseId = String(block.tool_use_id || block.id || '');
        const toolName = block.name || toolNameById.get(toolUseId);
        if (!toolName) {
          // Unmatched tool result: degrade to plain text so the content is kept.
          const text = normalizeToolResultContent(block.content);
          if (text) {
            parts.push({ text });
          }
          continue;
        }

        parts.push({
          functionResponse: {
            name: toolName,
            response: normalizeGeminiFunctionResponsePayload(block.content)
          }
        });
        continue;
      }

      // Any other block type is reduced to its text, if it has any.
      const text = extractText(block);
      if (text) {
        parts.push({ text });
      }
    }

    // Messages that produced no parts are dropped entirely.
    if (parts.length === 0) continue;
    contents.push({ role, parts });
  }
  return contents;
}
931
+
932
/**
 * Deep-clones a value via a JSON round trip. Values that cannot be
 * serialized (cycles, BigInt, ...) are returned unchanged. Note the clone
 * drops undefined values, functions, and converts Dates to strings — callers
 * here only pass JSON request payloads.
 * @param {*} value
 * @returns {*} Deep clone, or the original value when serialization fails.
 */
function cloneJsonCompatible(value) {
  try {
    const serialized = JSON.stringify(value);
    return JSON.parse(serialized);
  } catch {
    return value;
  }
}
939
+
940
/**
 * Normalizes a Responses-API `input` value for the Codex endpoint.
 * A bare string becomes a single user message; arrays are deep-cloned with
 * any `system` role rewritten to `developer`. Other shapes yield undefined
 * (caller leaves the original value untouched).
 * @param {string|Array|*} inputValue
 * @returns {Array|undefined}
 */
function normalizeCodexResponsesInput(inputValue) {
  if (typeof inputValue === 'string') {
    return [{
      type: 'message',
      role: 'user',
      content: [{ type: 'input_text', text: inputValue }]
    }];
  }

  if (!Array.isArray(inputValue)) return undefined;

  const rewritten = [];
  for (const item of inputValue) {
    if (!item || typeof item !== 'object') {
      rewritten.push(item);
      continue;
    }
    const copy = cloneJsonCompatible(item);
    // Codex expects 'developer' rather than 'system' for instruction turns.
    if (String(copy?.role || '').trim().toLowerCase() === 'system') {
      copy.role = 'developer';
    }
    rewritten.push(copy);
  }
  return rewritten;
}
966
+
967
/**
 * Prepares an OpenCode (OpenAI-compatible) payload for the Codex
 * `/responses` endpoint: clones it, rewrites roles in `input`, forces
 * streaming, and strips fields the endpoint rejects.
 * @param {object} payload - Original request body.
 * @param {string} [fallbackModel] - Model used when the payload names none.
 * @returns {{requestBody: object, model: string}}
 */
function convertOpenCodePayloadToCodexResponses(payload = {}, fallbackModel = '') {
  const requestBody = cloneJsonCompatible((payload && typeof payload === 'object') ? payload : {});
  if (requestBody.model === undefined && fallbackModel) {
    requestBody.model = fallbackModel;
  }

  const normalizedInput = normalizeCodexResponsesInput(requestBody.input);
  if (normalizedInput !== undefined) {
    requestBody.input = normalizedInput;
  }

  // Codex responses are always consumed as a stream and never stored server-side.
  requestBody.stream = true;
  requestBody.store = false;
  if (requestBody.parallel_tool_calls === undefined) {
    requestBody.parallel_tool_calls = true;
  }
  if (typeof requestBody.instructions !== 'string') {
    requestBody.instructions = '';
  }

  // Keep only non-blank string include entries and always request encrypted
  // reasoning content (needed because store=false).
  const include = Array.isArray(requestBody.include)
    ? requestBody.include.filter(item => typeof item === 'string' && item.trim())
    : [];
  if (!include.includes('reasoning.encrypted_content')) {
    include.push('reasoning.encrypted_content');
  }
  requestBody.include = include;

  // Fields the Codex /responses endpoint does not accept.
  delete requestBody.max_output_tokens;
  delete requestBody.max_completion_tokens;
  delete requestBody.temperature;
  delete requestBody.top_p;
  delete requestBody.service_tier;
  delete requestBody.user;
  delete requestBody.previous_response_id;
  delete requestBody.prompt_cache_retention;
  delete requestBody.safety_identifier;

  return {
    requestBody,
    model: requestBody.model || fallbackModel || ''
  };
}
1010
+
1011
/**
 * Converts an OpenCode (OpenAI-compatible) chat payload into a Gemini
 * `generateContent` request body plus the model name to target.
 * @param {string} pathname - Incoming request path (forwarded to the message normalizer).
 * @param {object} payload - Original OpenAI-style request body.
 * @param {string} [fallbackModel] - Model used when the payload names none.
 * @returns {{model: string, requestBody: object}}
 */
function convertOpenCodePayloadToGemini(pathname, payload = {}, fallbackModel = '') {
  const normalized = normalizeOpenCodeMessages(pathname, payload);
  // Prefer the Responses-style field name, fall back to the Chat-style one.
  const maxTokens = Number(payload.max_output_tokens ?? payload.max_tokens);
  const stopSequences = normalizeStopSequences(payload.stop);
  const tools = normalizeOpenAiToolsToGemini(payload.tools || []);
  const toolConfig = normalizeToolChoiceToGemini(payload.tool_choice);

  const requestBody = {
    contents: buildGeminiContents(normalized.messages)
  };

  if (normalized.system) {
    requestBody.systemInstruction = {
      parts: [{ text: normalized.system }]
    };
  }

  // generationConfig is only attached when at least one knob was provided.
  const generationConfig = {};
  if (Number.isFinite(maxTokens) && maxTokens > 0) {
    generationConfig.maxOutputTokens = Math.round(maxTokens);
  }
  if (Number.isFinite(Number(payload.temperature))) {
    generationConfig.temperature = Number(payload.temperature);
  }
  if (Number.isFinite(Number(payload.top_p))) {
    generationConfig.topP = Number(payload.top_p);
  }
  if (Number.isFinite(Number(payload.top_k))) {
    generationConfig.topK = Number(payload.top_k);
  }
  if (stopSequences) {
    generationConfig.stopSequences = stopSequences;
  }
  if (Object.keys(generationConfig).length > 0) {
    requestBody.generationConfig = generationConfig;
  }

  if (tools.length > 0) {
    requestBody.tools = tools;
  }
  if (toolConfig) {
    requestBody.toolConfig = toolConfig;
  }

  return {
    model: payload.model || fallbackModel || '',
    requestBody
  };
}
1060
+
1061
/**
 * Builds the Claude Messages endpoint URL from a configured base URL.
 * Falls back to api.anthropic.com on empty/invalid input, appends
 * `/v1/messages` unless the path already targets a messages endpoint, and
 * always sets `?beta=true`.
 * @param {string} [baseUrl] - Configured relay/base URL.
 * @returns {string} Absolute target URL.
 */
function buildClaudeTargetUrl(baseUrl = '') {
  const DEFAULT_BASE = 'https://api.anthropic.com';
  let targetUrl;
  try {
    targetUrl = new URL(String(baseUrl || '').trim() || DEFAULT_BASE);
  } catch {
    targetUrl = new URL(DEFAULT_BASE);
  }

  const trimmedPath = targetUrl.pathname.replace(/\/+$/, '');
  let finalPath;
  if (!trimmedPath || trimmedPath === '/') {
    finalPath = '/v1/messages';
  } else if (trimmedPath.endsWith('/messages')) {
    finalPath = trimmedPath;
  } else if (trimmedPath.endsWith('/v1')) {
    finalPath = `${trimmedPath}/messages`;
  } else {
    finalPath = `${trimmedPath}/v1/messages`;
  }

  targetUrl.pathname = finalPath;
  targetUrl.searchParams.set('beta', 'true');
  return targetUrl.toString();
}
1084
+
1085
/**
 * Decides whether a Gemini base URL should be called with the CLI
 * (cloudcode `v1internal`) request format rather than the public
 * generativelanguage REST format.
 * @param {string} [baseUrl] - Configured base URL.
 * @returns {boolean} True when the CLI format should be used.
 */
function shouldUseGeminiCliFormat(baseUrl = '') {
  let parsed;
  try {
    parsed = new URL(String(baseUrl || '').trim() || 'https://generativelanguage.googleapis.com');
  } catch {
    return false;
  }

  const host = String(parsed.hostname || '').toLowerCase();
  const path = parsed.pathname.replace(/\/+$/, '');

  // Explicit CLI-style paths win outright.
  const looksLikeCliPath = path.includes('/v1internal')
    || path.endsWith(':generateContent')
    || path.endsWith(':streamGenerateContent');
  if (looksLikeCliPath) return true;

  // Explicit public-REST-style paths rule CLI out.
  if (path.includes('/v1beta') || path.includes('/models/')) return false;

  if (host.includes('cloudcode-pa.googleapis.com')) return true;

  if (!path || path === '/') {
    // Bare host: only Google's public endpoints use the native format.
    const isNativeHost = host.includes('generativelanguage.googleapis.com')
      || host.includes('aiplatform.googleapis.com');
    return !isNativeHost;
  }
  return false;
}
1110
+
1111
/**
 * Computes the pathname for a Gemini CLI (v1internal) request, honoring an
 * existing method suffix on the configured path and switching it to match
 * the requested streaming mode.
 * @param {URL} parsedUrl - Parsed base URL.
 * @param {boolean} [stream=false] - Whether a streaming call is wanted.
 * @returns {string} Target pathname ending in `:generateContent` or `:streamGenerateContent`.
 */
function buildGeminiCliTargetPath(parsedUrl, stream = false) {
  const basePath = parsedUrl.pathname.replace(/\/+$/, '');
  const method = stream ? 'streamGenerateContent' : 'generateContent';

  if (!basePath || basePath === '/') {
    return `/v1internal:${method}`;
  }
  // Path already carries a method suffix: swap it for the requested one.
  if (basePath.endsWith(':streamGenerateContent') || basePath.endsWith(':generateContent')) {
    return basePath.replace(/:(?:streamGenerateContent|generateContent)$/, `:${method}`);
  }
  if (basePath.endsWith('/v1internal')) {
    return `${basePath}:${method}`;
  }
  return `${basePath}/v1internal:${method}`;
}
1133
+
1134
/**
 * Builds the full Gemini CLI (cloudcode v1internal) endpoint URL, defaulting
 * to cloudcode-pa.googleapis.com and appending `?alt=sse` for streaming.
 * @param {string} [baseUrl] - Configured base URL.
 * @param {object} [options] - `stream` toggles the streaming method + alt=sse.
 * @returns {string} Absolute target URL.
 */
function buildGeminiCliTargetUrl(baseUrl = '', options = {}) {
  const wantsStream = !!options.stream;
  const DEFAULT_BASE = 'https://cloudcode-pa.googleapis.com';

  let targetUrl;
  try {
    targetUrl = new URL(String(baseUrl || '').trim() || DEFAULT_BASE);
  } catch {
    targetUrl = new URL(DEFAULT_BASE);
  }

  targetUrl.pathname = buildGeminiCliTargetPath(targetUrl, wantsStream);
  if (wantsStream) {
    targetUrl.searchParams.set('alt', 'sse');
  }
  return targetUrl.toString();
}
1150
+
1151
/**
 * Builds a public Gemini REST endpoint URL
 * (`.../{v1beta|v1}/models/{model}:{method}`), defaulting to
 * generativelanguage.googleapis.com. Returns '' when no model is given.
 * @param {string} [baseUrl] - Configured base URL.
 * @param {string} [model] - Gemini model name (required).
 * @param {string} [apiKey] - Appended as `?key=` when present.
 * @param {object} [options] - `stream` selects streamGenerateContent + alt=sse.
 * @returns {string} Absolute target URL, or '' without a model.
 */
function buildGeminiNativeTargetUrl(baseUrl = '', model = '', apiKey = '', options = {}) {
  const modelName = String(model || '').trim();
  if (!modelName) return '';
  const wantsStream = !!options.stream;
  const DEFAULT_BASE = 'https://generativelanguage.googleapis.com';

  let targetUrl;
  try {
    targetUrl = new URL(String(baseUrl || '').trim() || DEFAULT_BASE);
  } catch {
    targetUrl = new URL(DEFAULT_BASE);
  }

  // Strip trailing slashes and anything from '/models' onward — the model
  // segment is rebuilt below from the `model` argument.
  let basePath = targetUrl.pathname.replace(/\/+$/, '');
  const modelsAt = basePath.indexOf('/models');
  if (modelsAt >= 0) {
    basePath = basePath.slice(0, modelsAt);
  }

  let apiRoot;
  if (!basePath || basePath === '/') {
    apiRoot = '/v1beta';
  } else if (basePath.endsWith('/v1beta') || basePath.endsWith('/v1')) {
    apiRoot = basePath;
  } else {
    apiRoot = `${basePath}/v1beta`;
  }

  const method = wantsStream ? 'streamGenerateContent' : 'generateContent';
  targetUrl.pathname = `${apiRoot}/models/${encodeURIComponent(modelName)}:${method}`;
  if (apiKey) {
    targetUrl.searchParams.set('key', apiKey);
  }
  if (wantsStream) {
    targetUrl.searchParams.set('alt', 'sse');
  }

  return targetUrl.toString();
}
1189
+
1190
/**
 * Dispatches Gemini target-URL construction: CLI (v1internal) format when
 * `options.useCli` is set, public REST format otherwise.
 * @param {string} [baseUrl]
 * @param {string} [model]
 * @param {string} [apiKey]
 * @param {object} [options]
 * @returns {string} Absolute target URL.
 */
function buildGeminiTargetUrl(baseUrl = '', model = '', apiKey = '', options = {}) {
  return options.useCli
    ? buildGeminiCliTargetUrl(baseUrl, options)
    : buildGeminiNativeTargetUrl(baseUrl, model, apiKey, options);
}
1196
+
1197
/**
 * Builds the Codex `/responses` endpoint URL from a configured base URL.
 * Returns '' when the base URL is missing or unparseable (there is no
 * default host for Codex).
 * @param {string} [baseUrl] - Configured base URL.
 * @returns {string} Absolute target URL, or ''.
 */
function buildCodexTargetUrl(baseUrl = '') {
  let targetUrl;
  try {
    targetUrl = new URL(String(baseUrl || '').trim());
  } catch {
    return '';
  }

  const basePath = targetUrl.pathname.replace(/\/+$/, '');
  if (!basePath || basePath === '/') {
    targetUrl.pathname = '/responses';
  } else if (basePath.endsWith('/responses')) {
    // Already a Responses endpoint (this also covers ".../v1/responses").
    targetUrl.pathname = basePath;
  } else {
    targetUrl.pathname = `${basePath}/responses`;
  }
  return targetUrl.toString();
}
1219
+
1220
/**
 * Wraps an HTTP response stream with the matching zlib decompressor based on
 * its Content-Encoding header; returns the response itself when no (known)
 * encoding applies.
 * @param {import('http').IncomingMessage} res - Upstream HTTP response.
 * @returns {import('stream').Readable} Decoded readable stream.
 */
function createDecodedStream(res) {
  const encoding = String(res.headers['content-encoding'] || '').toLowerCase();

  if (encoding.includes('gzip')) {
    return res.pipe(zlib.createGunzip());
  }
  if (encoding.includes('deflate')) {
    return res.pipe(zlib.createInflate());
  }
  // Brotli support guard kept for older Node builds.
  const brotliAvailable = typeof zlib.createBrotliDecompress === 'function';
  if (encoding.includes('br') && brotliAvailable) {
    return res.pipe(zlib.createBrotliDecompress());
  }
  return res;
}
1229
+
1230
/**
 * Buffers an (optionally compressed) HTTP response body into a UTF-8 string.
 * @param {import('http').IncomingMessage} res - Upstream HTTP response.
 * @returns {Promise<string>} Resolves with the decoded body text.
 */
function collectHttpResponseBody(res) {
  return new Promise((resolve, reject) => {
    const decoded = createDecodedStream(res);
    const pieces = [];
    decoded.on('data', (piece) => {
      pieces.push(Buffer.isBuffer(piece) ? piece : Buffer.from(piece));
    });
    decoded.on('end', () => {
      resolve(Buffer.concat(pieces).toString('utf8'));
    });
    // Errors on either the decoded stream or the raw response reject.
    decoded.on('error', reject);
    res.on('error', reject);
  });
}
1240
+
1241
/**
 * POSTs a JSON payload to `url` and buffers the (possibly compressed)
 * response body into a string.
 * @param {string} url - Absolute http(s) URL.
 * @param {object} headers - Extra request headers; Content-Type/Length are added here.
 * @param {object} payload - JSON-serializable request body (falsy becomes {}).
 * @param {number} [timeoutMs=120000] - Socket inactivity timeout.
 * @returns {Promise<{statusCode: number, headers: object, rawBody: string}>}
 */
function postJson(url, headers, payload, timeoutMs = 120000) {
  return new Promise((resolve, reject) => {
    const target = new URL(url);
    const isHttps = target.protocol === 'https:';
    const client = isHttps ? https : http;
    const body = JSON.stringify(payload || {});
    const request = client.request({
      hostname: target.hostname,
      port: target.port || (isHttps ? 443 : 80),
      path: `${target.pathname}${target.search}`,
      method: 'POST',
      timeout: timeoutMs,
      headers: {
        ...headers,
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(body)
      }
    }, (response) => {
      collectHttpResponseBody(response)
        .then((rawBody) => {
          resolve({
            statusCode: response.statusCode || 500,
            headers: response.headers || {},
            rawBody
          });
        })
        .catch(reject);
    });

    request.on('error', reject);
    request.on('timeout', () => {
      // destroy(err) makes the request emit 'error', which rejects above.
      request.destroy(new Error('Gateway request timeout'));
    });
    request.write(body);
    request.end();
  });
}
1278
+
1279
/**
 * POSTs a JSON payload and resolves as soon as response headers arrive,
 * handing the raw response stream back to the caller (used for SSE
 * passthrough). Unlike postJson, the body is NOT buffered or decompressed
 * here — the caller consumes `response` directly.
 * @param {string} url - Absolute http(s) URL.
 * @param {object} headers - Extra request headers; Content-Type/Length are added here.
 * @param {object} payload - JSON-serializable request body (falsy becomes {}).
 * @param {number} [timeoutMs=120000] - Socket inactivity timeout.
 * @returns {Promise<{statusCode: number, headers: object, response: object}>}
 */
function postJsonStream(url, headers, payload, timeoutMs = 120000) {
  return new Promise((resolve, reject) => {
    const target = new URL(url);
    const isHttps = target.protocol === 'https:';
    const client = isHttps ? https : http;
    const body = JSON.stringify(payload || {});
    const request = client.request({
      hostname: target.hostname,
      port: target.port || (isHttps ? 443 : 80),
      path: `${target.pathname}${target.search}`,
      method: 'POST',
      timeout: timeoutMs,
      headers: {
        ...headers,
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(body)
      }
    }, (response) => {
      resolve({
        statusCode: response.statusCode || 500,
        headers: response.headers || {},
        response
      });
    });

    request.on('error', reject);
    request.on('timeout', () => {
      // destroy(err) makes the request emit 'error', which rejects above.
      request.destroy(new Error('Gateway request timeout'));
    });
    request.write(body);
    request.end();
  });
}
1312
+
1313
/**
 * Flattens a Claude Messages API response body into the pieces the OpenAI
 * compatibility layer needs: visible text, tool calls, and reasoning items.
 *
 * Fix: a `thinking` block that carries its text in a `text` field (the code
 * already falls back to `block.text` for thinking content) was previously
 * pushed into BOTH the visible text fragments and the reasoning items,
 * duplicating the reasoning in the user-visible answer. Thinking blocks now
 * contribute to reasoningItems only.
 *
 * @param {object} claudeResponse - Parsed Claude response body.
 * @returns {{text: string, functionCalls: Array, reasoningItems: Array}}
 */
function extractClaudeResponseContent(claudeResponse = {}) {
  const textFragments = [];
  const functionCalls = [];
  const reasoningItems = [];

  if (!Array.isArray(claudeResponse.content)) {
    return { text: '', functionCalls: [], reasoningItems: [] };
  }

  claudeResponse.content.forEach(block => {
    if (!block || typeof block !== 'object') return;

    if (block.type === 'thinking') {
      // Reasoning only — never duplicated into the visible text.
      const thinkingText = String(block.thinking || block.text || '').trim();
      if (thinkingText) {
        reasoningItems.push({
          id: `rs_${Date.now()}_${reasoningItems.length}`,
          text: thinkingText
        });
      }
      return;
    }

    if (typeof block.text === 'string' && block.text.trim()) {
      textFragments.push(block.text);
    }

    if (block.type === 'tool_use' && block.name) {
      const callId = String(block.id || generateToolCallId());
      const argsObject = (block.input && typeof block.input === 'object' && !Array.isArray(block.input))
        ? block.input
        : {};
      functionCalls.push({
        id: `fc_${callId}`,
        call_id: callId,
        name: block.name,
        arguments: JSON.stringify(argsObject)
      });
    }
  });

  return {
    text: textFragments.join('\n').trim(),
    functionCalls,
    reasoningItems
  };
}
1359
+
1360
/**
 * Convenience wrapper: returns only the visible text of a Claude response.
 * @param {object} claudeResponse - Parsed Claude response body.
 * @returns {string}
 */
function extractClaudeResponseText(claudeResponse = {}) {
  const { text } = extractClaudeResponseContent(claudeResponse);
  return text;
}
1363
+
1364
/**
 * Converts one Gemini `functionCall` part into the internal call record
 * shape shared with the Claude path.
 * @param {object} functionCall - Gemini functionCall part ({ name, args, id? }).
 * @param {number} [callIndex=0] - Position used to synthesize a fallback id.
 * @returns {object|null} Call record, or null when the part has no name.
 */
function buildGeminiFunctionCallRecord(functionCall = {}, callIndex = 0) {
  if (!functionCall || typeof functionCall !== 'object' || !functionCall.name) {
    return null;
  }

  const fallbackId = `call_${callIndex + 1}`;
  const callId = String(functionCall.id || functionCall.callId || fallbackId);
  const hasArgsObject = functionCall.args
    && typeof functionCall.args === 'object'
    && !Array.isArray(functionCall.args);

  return {
    id: `fc_${callId}`,
    call_id: callId,
    name: functionCall.name,
    arguments: JSON.stringify(hasArgsObject ? functionCall.args : {})
  };
}
1378
+
1379
/**
 * Flattens a Gemini generateContent response into visible text, function
 * calls, and reasoning items (parts flagged `thought: true`).
 * @param {object} geminiResponse - Parsed Gemini response body.
 * @returns {{text: string, functionCalls: Array, reasoningItems: Array}}
 */
function extractGeminiResponseContent(geminiResponse = {}) {
  if (!Array.isArray(geminiResponse.candidates)) {
    return { text: '', functionCalls: [], reasoningItems: [] };
  }

  const textParts = [];
  const functionCalls = [];
  const reasoningItems = [];

  for (const candidate of geminiResponse.candidates) {
    const parts = (candidate && typeof candidate === 'object')
      ? candidate.content?.parts
      : undefined;
    if (!Array.isArray(parts)) continue;

    for (const part of parts) {
      if (!part || typeof part !== 'object') continue;

      if (part.functionCall && typeof part.functionCall === 'object') {
        // Current count doubles as the next call index for fallback ids.
        const record = buildGeminiFunctionCallRecord(part.functionCall, functionCalls.length);
        if (record) functionCalls.push(record);
        continue;
      }

      if (typeof part.text !== 'string' || !part.text.trim()) continue;

      if (part.thought === true) {
        reasoningItems.push({
          id: `rs_${Date.now()}_${reasoningItems.length}`,
          text: part.text
        });
      } else {
        textParts.push(part.text);
      }
    }
  }

  return {
    text: textParts.join('\n').trim(),
    functionCalls,
    reasoningItems
  };
}
1427
+
1428
/**
 * Extracts token usage counts from a Gemini response's usageMetadata,
 * defaulting each counter to 0 and deriving the total when absent.
 * @param {object} geminiResponse - Parsed Gemini response body.
 * @returns {{inputTokens: number, outputTokens: number, totalTokens: number, cachedTokens: number, reasoningTokens: number}}
 */
function extractGeminiUsage(geminiResponse = {}) {
  const meta = (geminiResponse.usageMetadata && typeof geminiResponse.usageMetadata === 'object')
    ? geminiResponse.usageMetadata
    : {};

  const inputTokens = Number(meta.promptTokenCount || 0);
  const outputTokens = Number(meta.candidatesTokenCount || 0);

  return {
    inputTokens,
    outputTokens,
    totalTokens: Number(meta.totalTokenCount || (inputTokens + outputTokens)),
    cachedTokens: Number(meta.cachedContentTokenCount || 0),
    reasoningTokens: Number(meta.thoughtsTokenCount || 0)
  };
}
1446
+
1447
/**
 * Maps a Claude `stop_reason` onto an OpenAI chat `finish_reason`.
 * @param {string|undefined} stopReason - Claude stop reason.
 * @returns {string} 'length' | 'tool_calls' | 'stop'
 */
function mapClaudeStopReasonToChatFinishReason(stopReason) {
  switch (stopReason) {
    case 'max_tokens':
      return 'length';
    case 'tool_use':
      return 'tool_calls';
    default:
      // 'pause_turn', 'end_turn', and anything unknown read as a normal stop.
      return 'stop';
  }
}
1453
+
1454
/**
 * Maps a Gemini finishReason onto an OpenAI chat `finish_reason`.
 * Tool calls win outright; safety-style terminations become 'content_filter'.
 * @param {string|undefined} finishReason - Gemini finish reason.
 * @param {boolean} [hasToolCalls=false] - Whether the turn produced tool calls.
 * @returns {string} 'tool_calls' | 'length' | 'content_filter' | 'stop'
 */
function mapGeminiFinishReasonToChatFinishReason(finishReason, hasToolCalls = false) {
  if (hasToolCalls) return 'tool_calls';

  switch (String(finishReason || '').trim().toUpperCase()) {
    case 'MAX_TOKENS':
      return 'length';
    case 'SAFETY':
    case 'RECITATION':
    case 'SPII':
      return 'content_filter';
    default:
      return 'stop';
  }
}
1461
+
1462
/**
 * Converts a Claude Messages API response into an OpenAI Responses API
 * `response` object (reasoning items first, then the assistant message,
 * then function_call items).
 * @param {object} claudeResponse - Parsed Claude response body.
 * @param {string} [fallbackModel] - Model name used when the response lacks one.
 * @returns {object} OpenAI-compatible Responses object.
 */
function buildOpenAiResponsesObject(claudeResponse = {}, fallbackModel = '') {
  const inputTokens = Number(claudeResponse?.usage?.input_tokens || 0);
  const outputTokens = Number(claudeResponse?.usage?.output_tokens || 0);
  const totalTokens = Number(claudeResponse?.usage?.total_tokens || (inputTokens + outputTokens));
  const parsedContent = extractClaudeResponseContent(claudeResponse);
  const text = parsedContent.text;
  // Rough estimate (~4 chars per token) since Claude does not report reasoning tokens.
  const reasoningTokens = parsedContent.reasoningItems.reduce((acc, item) => acc + Math.floor((item.text || '').length / 4), 0);
  const model = claudeResponse.model || fallbackModel || '';
  // Claude ids may contain characters the resp_ id format does not allow; strip them.
  const responseId = `resp_${String(claudeResponse.id || Date.now()).replace(/[^a-zA-Z0-9_]/g, '')}`;
  const messageId = claudeResponse.id || `msg_${Date.now()}`;
  const createdAt = Math.floor(Date.now() / 1000);
  const output = [];

  parsedContent.reasoningItems.forEach(item => {
    output.push({
      id: item.id,
      type: 'reasoning',
      summary: [
        {
          type: 'summary_text',
          text: item.text
        }
      ]
    });
  });

  // Emit a message item whenever there is text, and also when the response
  // has neither text nor tool calls (so output is never empty).
  if (text || parsedContent.functionCalls.length === 0) {
    output.push({
      id: messageId,
      type: 'message',
      status: 'completed',
      role: 'assistant',
      content: [
        {
          type: 'output_text',
          text,
          annotations: []
        }
      ]
    });
  }

  parsedContent.functionCalls.forEach(call => {
    output.push({
      id: call.id,
      type: 'function_call',
      status: 'completed',
      arguments: call.arguments,
      call_id: call.call_id,
      name: call.name
    });
  });

  return {
    id: responseId,
    object: 'response',
    created_at: createdAt,
    status: 'completed',
    model,
    output,
    usage: {
      input_tokens: inputTokens,
      output_tokens: outputTokens,
      total_tokens: totalTokens,
      // Only include the details object when there is something to report.
      ...(reasoningTokens > 0 ? { output_tokens_details: { reasoning_tokens: reasoningTokens } } : {})
    }
  };
}
1530
+
1531
/**
 * Converts a Gemini generateContent response into an OpenAI Responses API
 * `response` object (reasoning items first, then the assistant message,
 * then function_call items).
 * @param {object} geminiResponse - Parsed Gemini response body.
 * @param {string} [fallbackModel] - Model name used when the response lacks one.
 * @returns {object} OpenAI-compatible Responses object.
 */
function buildOpenAiResponsesObjectFromGemini(geminiResponse = {}, fallbackModel = '') {
  const usage = extractGeminiUsage(geminiResponse);
  const parsedContent = extractGeminiResponseContent(geminiResponse);
  const text = parsedContent.text;
  // Prefer Gemini's reported thought-token count; otherwise estimate at ~4 chars/token.
  const reasoningTokens = usage.reasoningTokens > 0
    ? usage.reasoningTokens
    : parsedContent.reasoningItems.reduce((acc, item) => acc + Math.floor((item.text || '').length / 4), 0);
  const model = geminiResponse.modelVersion || fallbackModel || '';
  const responseId = `resp_${Date.now()}`;
  const messageId = `msg_${Date.now()}`;
  const createdAt = Math.floor(Date.now() / 1000);
  const output = [];

  parsedContent.reasoningItems.forEach(item => {
    output.push({
      id: item.id,
      type: 'reasoning',
      summary: [
        {
          type: 'summary_text',
          text: item.text
        }
      ]
    });
  });

  // Emit a message item whenever there is text, and also when the response
  // has neither text nor tool calls (so output is never empty).
  if (text || parsedContent.functionCalls.length === 0) {
    output.push({
      id: messageId,
      type: 'message',
      status: 'completed',
      role: 'assistant',
      content: [
        {
          type: 'output_text',
          text,
          annotations: []
        }
      ]
    });
  }

  parsedContent.functionCalls.forEach(call => {
    output.push({
      id: call.id,
      type: 'function_call',
      status: 'completed',
      arguments: call.arguments,
      call_id: call.call_id,
      name: call.name
    });
  });

  return {
    id: responseId,
    object: 'response',
    created_at: createdAt,
    status: 'completed',
    model,
    output,
    usage: {
      input_tokens: usage.inputTokens,
      output_tokens: usage.outputTokens,
      total_tokens: usage.totalTokens,
      // Detail objects are included only when there is something to report.
      ...(usage.cachedTokens > 0 ? { input_tokens_details: { cached_tokens: usage.cachedTokens } } : {}),
      ...(reasoningTokens > 0 ? { output_tokens_details: { reasoning_tokens: reasoningTokens } } : {})
    }
  };
}
1600
+
1601
/**
 * Converts a Claude Messages API response into an OpenAI `chat.completion`
 * object.
 * @param {object} claudeResponse - Parsed Claude response body.
 * @param {string} [fallbackModel] - Model name used when the response lacks one.
 * @returns {object} OpenAI-compatible chat completion.
 */
function buildOpenAiChatCompletionsObject(claudeResponse = {}, fallbackModel = '') {
  const inputTokens = Number(claudeResponse?.usage?.input_tokens || 0);
  const outputTokens = Number(claudeResponse?.usage?.output_tokens || 0);
  const totalTokens = Number(claudeResponse?.usage?.total_tokens || (inputTokens + outputTokens));
  const parsedContent = extractClaudeResponseContent(claudeResponse);
  const text = parsedContent.text;
  const model = claudeResponse.model || fallbackModel || '';
  // Claude ids may contain characters the chatcmpl_ id format does not allow; strip them.
  const chatId = `chatcmpl_${String(claudeResponse.id || Date.now()).replace(/[^a-zA-Z0-9_]/g, '')}`;
  const created = Math.floor(Date.now() / 1000);
  const hasToolCalls = parsedContent.functionCalls.length > 0;
  const message = {
    role: 'assistant',
    // Pure tool-call turns carry content: null rather than ''.
    content: text || (hasToolCalls ? null : '')
  };

  if (hasToolCalls) {
    message.tool_calls = parsedContent.functionCalls.map(call => ({
      id: call.call_id,
      type: 'function',
      function: {
        name: call.name,
        arguments: call.arguments
      }
    }));
  }

  return {
    id: chatId,
    object: 'chat.completion',
    created,
    model,
    choices: [
      {
        index: 0,
        message,
        finish_reason: hasToolCalls ? 'tool_calls' : mapClaudeStopReasonToChatFinishReason(claudeResponse.stop_reason)
      }
    ],
    usage: {
      prompt_tokens: inputTokens,
      completion_tokens: outputTokens,
      total_tokens: totalTokens
    }
  };
}
1646
+
1647
/**
 * Converts a Gemini generateContent response into an OpenAI
 * `chat.completion` object.
 * @param {object} geminiResponse - Parsed Gemini response body.
 * @param {string} [fallbackModel] - Model name used when the response lacks one.
 * @returns {object} OpenAI-compatible chat completion.
 */
function buildOpenAiChatCompletionsObjectFromGemini(geminiResponse = {}, fallbackModel = '') {
  const usage = extractGeminiUsage(geminiResponse);
  const parsedContent = extractGeminiResponseContent(geminiResponse);
  const text = parsedContent.text;
  const model = geminiResponse.modelVersion || fallbackModel || '';
  const chatId = `chatcmpl_${Date.now()}`;
  const created = Math.floor(Date.now() / 1000);
  // finish_reason is derived from the first candidate only.
  const firstCandidate = Array.isArray(geminiResponse.candidates) ? geminiResponse.candidates[0] : null;
  const functionCalls = parsedContent.functionCalls;
  const hasToolCalls = functionCalls.length > 0;

  const message = {
    role: 'assistant',
    // Pure tool-call turns carry content: null rather than ''.
    content: text || (hasToolCalls ? null : '')
  };

  if (hasToolCalls) {
    message.tool_calls = functionCalls.map(call => ({
      id: call.call_id,
      type: 'function',
      function: {
        name: call.name,
        arguments: call.arguments
      }
    }));
  }

  return {
    id: chatId,
    object: 'chat.completion',
    created,
    model,
    choices: [
      {
        index: 0,
        message,
        finish_reason: mapGeminiFinishReasonToChatFinishReason(firstCandidate?.finishReason, hasToolCalls)
      }
    ],
    usage: {
      prompt_tokens: usage.inputTokens,
      completion_tokens: usage.outputTokens,
      total_tokens: usage.totalTokens
    }
  };
}
1693
+
1694
/**
 * Sends an OpenAI-shaped JSON error body (`{ error: { message, type } }`).
 * @param {object} res - Express-style response.
 * @param {number} statusCode - HTTP status; non-numeric/0 falls back to 500.
 * @param {string} message - Error message; empty falls back to a generic one.
 * @param {string} [type='invalid_request_error'] - OpenAI error type string.
 */
function sendOpenAiStyleError(res, statusCode, message, type = 'invalid_request_error') {
  const httpStatus = Number(statusCode) || 500;
  const errorBody = {
    error: {
      message: message || 'Gateway request failed',
      type
    }
  };
  res.status(httpStatus).json(errorBody);
}
1703
+
1704
/**
 * Publishes usage accounting for one OpenCode request: a live log broadcast
 * plus a persisted request record.
 * @param {object} params
 * @param {string} params.requestId - Correlation id for the request.
 * @param {object} params.channel - Channel descriptor; `name` and `id` are read.
 * @param {string} params.model - Model name ('' tolerated).
 * @param {object} params.usage - Token usage in either Responses-style
 *   (input_tokens/output_tokens) or Chat-style (prompt_tokens/completion_tokens) fields.
 * @param {number} params.startTime - Request start (epoch ms) for duration/timestamp.
 */
function publishOpenCodeUsageLog({ requestId, channel, model, usage, startTime }) {
  // Accept both Responses-style and Chat-style usage field names.
  const inputTokens = Number(usage?.input_tokens || usage?.prompt_tokens || 0);
  const outputTokens = Number(usage?.output_tokens || usage?.completion_tokens || 0);
  const totalTokens = Number(usage?.total_tokens || (inputTokens + outputTokens));
  const cachedTokens = Number(usage?.input_tokens_details?.cached_tokens || 0);
  const reasoningTokens = Number(usage?.output_tokens_details?.reasoning_tokens || 0);
  const now = new Date();
  // HH:mm:ss, 24-hour clock, for the live log view.
  const time = now.toLocaleTimeString('zh-CN', {
    hour12: false,
    hour: '2-digit',
    minute: '2-digit',
    second: '2-digit'
  });

  const tokens = {
    input: inputTokens,
    output: outputTokens,
    total: totalTokens
  };
  const cost = calculateCost(model || '', tokens);

  // Live broadcast to connected log listeners.
  broadcastLog({
    type: 'log',
    id: requestId,
    time,
    channel: channel.name,
    model: model || '',
    inputTokens,
    outputTokens,
    cachedTokens,
    reasoningTokens,
    totalTokens,
    cost,
    source: 'opencode'
  });

  // Persisted record for usage statistics.
  recordOpenCodeRequest({
    id: requestId,
    timestamp: new Date(startTime).toISOString(),
    toolType: 'opencode',
    channel: channel.name,
    channelId: channel.id,
    model: model || '',
    tokens: {
      input: inputTokens,
      output: outputTokens,
      reasoning: reasoningTokens,
      cached: cachedTokens,
      total: totalTokens
    },
    duration: Date.now() - startTime,
    success: true,
    cost
  });
}
1759
+
1760
function setSseHeaders(res) {
  // Prepare the response for a long-lived Server-Sent Events stream.
  res.status(200);
  const sseHeaders = [
    ['Content-Type', 'text/event-stream; charset=utf-8'],
    ['Cache-Control', 'no-cache'],
    ['Connection', 'keep-alive']
  ];
  for (const [name, value] of sseHeaders) {
    res.setHeader(name, value);
  }
}
1766
+
1767
function writeSseData(res, payload) {
  // Emit one SSE event: a single `data:` line followed by a blank line.
  const serialized = JSON.stringify(payload);
  res.write('data: ' + serialized + '\n\n');
}
1770
+
1771
function writeSseDone(res) {
  // Terminal sentinel used by OpenAI-style SSE streams.
  const DONE_FRAME = 'data: [DONE]\n\n';
  res.write(DONE_FRAME);
}
1774
+
1775
function sendResponsesSse(res, responseObject) {
  // Replay an already-complete Responses-API object as a minimal SSE stream:
  // response.created -> (one text delta) -> (function-call items) ->
  // response.completed -> [DONE].
  const items = Array.isArray(responseObject?.output) ? responseObject.output : [];

  const textParts = [];
  const toolItems = [];
  for (const item of items) {
    if (item?.type === 'message') {
      const part = item?.content?.[0]?.text || '';
      if (part) {
        textParts.push(part);
      }
    } else if (item?.type === 'function_call') {
      toolItems.push(item);
    }
  }
  const text = textParts.join('\n').trim();

  setSseHeaders(res);

  writeSseData(res, {
    type: 'response.created',
    response: {
      id: responseObject.id,
      object: 'response',
      created_at: responseObject.created_at,
      model: responseObject.model,
      status: 'in_progress'
    }
  });

  // All accumulated text is sent as one delta (non-incremental replay).
  if (text) {
    writeSseData(res, {
      type: 'response.output_text.delta',
      delta: text
    });
  }

  toolItems.forEach((item, index) => {
    writeSseData(res, {
      type: 'response.output_item.added',
      output_index: index,
      item
    });
  });

  writeSseData(res, {
    type: 'response.completed',
    response: responseObject
  });
  writeSseDone(res);
  res.end();
}
1826
+
1827
function sendChatCompletionsSse(res, responseObject) {
  // Replay a non-streaming chat.completion object as two SSE chunks:
  // one carrying the whole assistant delta, one carrying only finish_reason.
  const firstChoice = responseObject?.choices?.[0];
  const message = firstChoice?.message || {};
  const text = message?.content || '';
  const toolCalls = Array.isArray(message?.tool_calls) ? message.tool_calls : [];
  const finishReason = firstChoice?.finish_reason || 'stop';

  setSseHeaders(res);

  // Shared chunk envelope fields.
  const chunkBase = () => ({
    id: responseObject.id,
    object: 'chat.completion.chunk',
    created: responseObject.created,
    model: responseObject.model
  });

  const delta = { role: 'assistant' };
  if (text) {
    delta.content = text;
  }
  if (toolCalls.length > 0) {
    delta.tool_calls = toolCalls;
  }

  writeSseData(res, {
    ...chunkBase(),
    choices: [
      {
        index: 0,
        delta,
        finish_reason: null
      }
    ]
  });

  writeSseData(res, {
    ...chunkBase(),
    choices: [
      {
        index: 0,
        delta: {},
        finish_reason: finishReason
      }
    ]
  });

  writeSseDone(res);
  res.end();
}
1871
+
1872
function nextResponsesSequence(state) {
  // Monotonic sequence_number for Responses-API SSE events (starts at 1).
  const current = Number(state.sequence || 0);
  const next = current + 1;
  state.sequence = next;
  return next;
}
1876
+
1877
function createClaudeResponsesStreamState(fallbackModel = '') {
  // Fresh mutable accumulator for translating one Claude SSE stream into
  // OpenAI Responses-API events. Scalar fields track stream identity and
  // usage; the Map fields hold per-content-block data keyed by block index.
  const state = {
    sequence: 0,
    responseId: '',
    createdAt: Math.floor(Date.now() / 1000),
    model: fallbackModel || '',
    inputTokens: 0,
    outputTokens: 0,
    usageSeen: false,
    completed: false,
    completedResponse: null
  };
  const mapFields = [
    'blockTypeByIndex',
    'messageIdByIndex',
    'messageTextByIndex',
    'functionCallIdByIndex',
    'functionNameByIndex',
    'functionArgsByIndex',
    'reasoningIdByIndex',
    'reasoningTextByIndex'
  ];
  for (const field of mapFields) {
    state[field] = new Map();
  }
  return state;
}
1898
+
1899
function sortedNumericKeys(map) {
  // Ascending numeric view of the map's keys; non-finite keys are dropped.
  const keys = [];
  for (const key of map.keys()) {
    const numeric = Number(key);
    if (Number.isFinite(numeric)) {
      keys.push(numeric);
    }
  }
  keys.sort((left, right) => left - right);
  return keys;
}
1905
+
1906
function normalizeFunctionArgumentsString(value) {
  // Canonicalize a tool-call arguments string: compact JSON when parseable,
  // '{}' when empty or not a string, the trimmed raw text otherwise.
  if (typeof value !== 'string') return '{}';
  const trimmed = value.trim();
  if (trimmed === '') return '{}';
  let canonical;
  try {
    canonical = JSON.stringify(JSON.parse(trimmed));
  } catch {
    // Keep partial/invalid JSON as-is rather than losing it.
    canonical = trimmed;
  }
  return canonical;
}
1915
+
1916
// Materialize the accumulated Claude stream state into a Responses-API
// `output` array, ordered as: reasoning items, then message items, then
// function-call items. Always returns at least one item.
function buildResponsesOutputFromClaudeStreamState(state) {
  const output = [];

  // Claude "thinking" blocks become `reasoning` items with one summary part.
  sortedNumericKeys(state.reasoningIdByIndex).forEach(index => {
    const reasoningId = state.reasoningIdByIndex.get(index) || `rs_${state.responseId || 'response'}_${index}`;
    const text = state.reasoningTextByIndex.get(index) || '';
    output.push({
      id: reasoningId,
      type: 'reasoning',
      summary: [
        {
          type: 'summary_text',
          text
        }
      ]
    });
  });

  // Text blocks become assistant `message` items. An empty text block is
  // skipped when the response also carries tool calls (a tool-only turn).
  sortedNumericKeys(state.messageIdByIndex).forEach(index => {
    const messageId = state.messageIdByIndex.get(index) || `msg_${state.responseId || 'response'}_${index}`;
    const text = state.messageTextByIndex.get(index) || '';
    if (!text && state.functionCallIdByIndex.size > 0) {
      return;
    }
    output.push({
      id: messageId,
      type: 'message',
      status: 'completed',
      role: 'assistant',
      content: [
        {
          type: 'output_text',
          text,
          annotations: []
        }
      ]
    });
  });

  // tool_use blocks become `function_call` items; argument strings are
  // canonicalized (compact JSON or '{}').
  sortedNumericKeys(state.functionCallIdByIndex).forEach(index => {
    const callId = state.functionCallIdByIndex.get(index) || generateToolCallId();
    const name = state.functionNameByIndex.get(index) || '';
    const args = normalizeFunctionArgumentsString(state.functionArgsByIndex.get(index));
    output.push({
      id: `fc_${callId}`,
      type: 'function_call',
      status: 'completed',
      arguments: args,
      call_id: callId,
      name
    });
  });

  // Guarantee a well-formed array: fall back to a single empty message so
  // downstream consumers always find at least one output item.
  if (output.length === 0) {
    output.push({
      id: `msg_${state.responseId || Date.now()}_0`,
      type: 'message',
      status: 'completed',
      role: 'assistant',
      content: [
        {
          type: 'output_text',
          text: '',
          annotations: []
        }
      ]
    });
  }

  return output;
}
1987
+
1988
function buildCompletedResponsesObjectFromStreamState(state) {
  // Assemble the final Responses-API object once the Claude stream ends.
  // Reasoning tokens are estimated at ~4 characters per token because the
  // upstream does not report them separately.
  const output = buildResponsesOutputFromClaudeStreamState(state);

  let reasoningTokens = 0;
  for (const index of sortedNumericKeys(state.reasoningTextByIndex)) {
    const text = state.reasoningTextByIndex.get(index) || '';
    reasoningTokens += Math.floor(text.length / 4);
  }

  const inputTokens = Number(state.inputTokens || 0);
  const outputTokens = Number(state.outputTokens || 0);
  const totalTokens = inputTokens + outputTokens;

  const response = {
    id: state.responseId || `resp_${Date.now()}`,
    object: 'response',
    created_at: Number(state.createdAt) || Math.floor(Date.now() / 1000),
    status: 'completed',
    model: state.model || '',
    output
  };

  // Only attach usage when the upstream reported it or we estimated something.
  if (state.usageSeen || totalTokens > 0 || reasoningTokens > 0) {
    const usage = {
      input_tokens: inputTokens,
      output_tokens: outputTokens,
      total_tokens: totalTokens
    };
    if (reasoningTokens > 0) {
      usage.output_tokens_details = { reasoning_tokens: reasoningTokens };
    }
    response.usage = usage;
  }

  return response;
}
2017
+
2018
// Translate a single parsed Claude (Anthropic Messages) SSE event into the
// corresponding OpenAI Responses-API SSE event(s), writing them to `res`
// and accumulating text/tool-call/reasoning data in `state` along the way.
// Claude block indices are reused directly as Responses output indices.
function processClaudeResponsesSseEvent(parsed, state, res) {
  if (!parsed || typeof parsed !== 'object') return;

  const type = parsed.type;
  if (!type) return;

  // --- message_start: capture identity + initial usage, announce the
  // response as created and in progress.
  if (type === 'message_start') {
    const message = parsed.message && typeof parsed.message === 'object' ? parsed.message : {};
    state.responseId = message.id || state.responseId || `resp_${Date.now()}`;
    state.model = message.model || state.model;
    state.createdAt = Math.floor(Date.now() / 1000);

    if (message.usage && typeof message.usage === 'object') {
      if (Number.isFinite(Number(message.usage.input_tokens))) {
        state.inputTokens = Number(message.usage.input_tokens);
        state.usageSeen = true;
      }
      if (Number.isFinite(Number(message.usage.output_tokens))) {
        state.outputTokens = Number(message.usage.output_tokens);
        state.usageSeen = true;
      }
    }

    writeSseData(res, {
      type: 'response.created',
      sequence_number: nextResponsesSequence(state),
      response: {
        id: state.responseId,
        object: 'response',
        created_at: state.createdAt,
        model: state.model,
        status: 'in_progress'
      }
    });

    writeSseData(res, {
      type: 'response.in_progress',
      sequence_number: nextResponsesSequence(state),
      response: {
        id: state.responseId,
        object: 'response',
        created_at: state.createdAt,
        model: state.model,
        status: 'in_progress'
      }
    });
    return;
  }

  // --- content_block_start: register the block type and emit the matching
  // "item added" (and part added) events.
  if (type === 'content_block_start') {
    const index = Number(parsed.index);
    const blockIndex = Number.isFinite(index) ? index : 0;
    const block = parsed.content_block && typeof parsed.content_block === 'object' ? parsed.content_block : {};
    const blockType = block.type;
    state.blockTypeByIndex.set(blockIndex, blockType);

    if (blockType === 'text') {
      const messageId = `msg_${state.responseId || Date.now()}_${blockIndex}`;
      state.messageIdByIndex.set(blockIndex, messageId);
      if (!state.messageTextByIndex.has(blockIndex)) {
        state.messageTextByIndex.set(blockIndex, '');
      }

      writeSseData(res, {
        type: 'response.output_item.added',
        sequence_number: nextResponsesSequence(state),
        output_index: blockIndex,
        item: {
          id: messageId,
          type: 'message',
          status: 'in_progress',
          role: 'assistant',
          content: []
        }
      });

      writeSseData(res, {
        type: 'response.content_part.added',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: blockIndex,
        content_index: 0,
        part: {
          type: 'output_text',
          text: '',
          annotations: [],
          logprobs: []
        }
      });
      return;
    }

    if (blockType === 'tool_use') {
      const callId = String(block.id || generateToolCallId());
      const name = block.name || '';
      state.functionCallIdByIndex.set(blockIndex, callId);
      state.functionNameByIndex.set(blockIndex, name);
      if (!state.functionArgsByIndex.has(blockIndex)) {
        state.functionArgsByIndex.set(blockIndex, '');
      }
      // Some upstreams ship the full input object on the start event rather
      // than as input_json_delta chunks.
      if (block.input && typeof block.input === 'object' && !Array.isArray(block.input)) {
        state.functionArgsByIndex.set(blockIndex, JSON.stringify(block.input));
      }

      writeSseData(res, {
        type: 'response.output_item.added',
        sequence_number: nextResponsesSequence(state),
        output_index: blockIndex,
        item: {
          id: `fc_${callId}`,
          type: 'function_call',
          status: 'in_progress',
          arguments: '',
          call_id: callId,
          name
        }
      });
      return;
    }

    if (blockType === 'thinking') {
      const reasoningId = `rs_${state.responseId || Date.now()}_${blockIndex}`;
      state.reasoningIdByIndex.set(blockIndex, reasoningId);
      if (!state.reasoningTextByIndex.has(blockIndex)) {
        state.reasoningTextByIndex.set(blockIndex, '');
      }

      writeSseData(res, {
        type: 'response.output_item.added',
        sequence_number: nextResponsesSequence(state),
        output_index: blockIndex,
        item: {
          id: reasoningId,
          type: 'reasoning',
          status: 'in_progress',
          summary: []
        }
      });

      writeSseData(res, {
        type: 'response.reasoning_summary_part.added',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: blockIndex,
        summary_index: 0,
        part: {
          type: 'summary_text',
          text: ''
        }
      });
    }
    return;
  }

  // --- content_block_delta: accumulate text/args/thinking and forward the
  // corresponding delta event.
  if (type === 'content_block_delta') {
    const index = Number(parsed.index);
    const blockIndex = Number.isFinite(index) ? index : 0;
    const delta = parsed.delta && typeof parsed.delta === 'object' ? parsed.delta : {};
    const deltaType = delta.type;

    if (deltaType === 'text_delta') {
      const text = typeof delta.text === 'string' ? delta.text : '';
      if (!text) return;
      const previous = state.messageTextByIndex.get(blockIndex) || '';
      state.messageTextByIndex.set(blockIndex, previous + text);
      // Ids may be missing if the start event was never seen; synthesize one.
      const messageId = state.messageIdByIndex.get(blockIndex) || `msg_${state.responseId || Date.now()}_${blockIndex}`;
      state.messageIdByIndex.set(blockIndex, messageId);

      writeSseData(res, {
        type: 'response.output_text.delta',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: blockIndex,
        content_index: 0,
        delta: text,
        logprobs: []
      });
      return;
    }

    if (deltaType === 'input_json_delta') {
      const partialJson = typeof delta.partial_json === 'string' ? delta.partial_json : '';
      const previous = state.functionArgsByIndex.get(blockIndex) || '';
      state.functionArgsByIndex.set(blockIndex, previous + partialJson);
      const callId = state.functionCallIdByIndex.get(blockIndex) || generateToolCallId();
      state.functionCallIdByIndex.set(blockIndex, callId);

      writeSseData(res, {
        type: 'response.function_call_arguments.delta',
        sequence_number: nextResponsesSequence(state),
        item_id: `fc_${callId}`,
        output_index: blockIndex,
        delta: partialJson
      });
      return;
    }

    if (deltaType === 'thinking_delta') {
      const thinking = typeof delta.thinking === 'string' ? delta.thinking : '';
      if (!thinking) return;
      const previous = state.reasoningTextByIndex.get(blockIndex) || '';
      state.reasoningTextByIndex.set(blockIndex, previous + thinking);
      const reasoningId = state.reasoningIdByIndex.get(blockIndex) || `rs_${state.responseId || Date.now()}_${blockIndex}`;
      state.reasoningIdByIndex.set(blockIndex, reasoningId);

      writeSseData(res, {
        type: 'response.reasoning_summary_text.delta',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: blockIndex,
        summary_index: 0,
        delta: thinking
      });
    }
    return;
  }

  // --- content_block_stop: emit the matching "done" events with the full
  // accumulated content for that block.
  if (type === 'content_block_stop') {
    const index = Number(parsed.index);
    const blockIndex = Number.isFinite(index) ? index : 0;
    const blockType = state.blockTypeByIndex.get(blockIndex);

    if (blockType === 'text') {
      const messageId = state.messageIdByIndex.get(blockIndex) || `msg_${state.responseId || Date.now()}_${blockIndex}`;
      const text = state.messageTextByIndex.get(blockIndex) || '';

      writeSseData(res, {
        type: 'response.output_text.done',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: blockIndex,
        content_index: 0,
        text,
        logprobs: []
      });

      writeSseData(res, {
        type: 'response.content_part.done',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: blockIndex,
        content_index: 0,
        part: {
          type: 'output_text',
          text,
          annotations: [],
          logprobs: []
        }
      });

      writeSseData(res, {
        type: 'response.output_item.done',
        sequence_number: nextResponsesSequence(state),
        output_index: blockIndex,
        item: {
          id: messageId,
          type: 'message',
          status: 'completed',
          role: 'assistant',
          content: [
            {
              type: 'output_text',
              text,
              annotations: []
            }
          ]
        }
      });
      return;
    }

    if (blockType === 'tool_use') {
      const callId = state.functionCallIdByIndex.get(blockIndex) || generateToolCallId();
      const name = state.functionNameByIndex.get(blockIndex) || '';
      const args = normalizeFunctionArgumentsString(state.functionArgsByIndex.get(blockIndex));

      writeSseData(res, {
        type: 'response.function_call_arguments.done',
        sequence_number: nextResponsesSequence(state),
        item_id: `fc_${callId}`,
        output_index: blockIndex,
        arguments: args
      });

      writeSseData(res, {
        type: 'response.output_item.done',
        sequence_number: nextResponsesSequence(state),
        output_index: blockIndex,
        item: {
          id: `fc_${callId}`,
          type: 'function_call',
          status: 'completed',
          arguments: args,
          call_id: callId,
          name
        }
      });
      return;
    }

    if (blockType === 'thinking') {
      const reasoningId = state.reasoningIdByIndex.get(blockIndex) || `rs_${state.responseId || Date.now()}_${blockIndex}`;
      const text = state.reasoningTextByIndex.get(blockIndex) || '';

      writeSseData(res, {
        type: 'response.reasoning_summary_text.done',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: blockIndex,
        summary_index: 0,
        text
      });

      writeSseData(res, {
        type: 'response.reasoning_summary_part.done',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: blockIndex,
        summary_index: 0,
        part: {
          type: 'summary_text',
          text
        }
      });
    }
    return;
  }

  // --- message_delta: pick up updated usage counters only.
  if (type === 'message_delta') {
    const usage = parsed.usage && typeof parsed.usage === 'object' ? parsed.usage : {};
    if (Number.isFinite(Number(usage.input_tokens))) {
      state.inputTokens = Number(usage.input_tokens);
      state.usageSeen = true;
    }
    if (Number.isFinite(Number(usage.output_tokens))) {
      state.outputTokens = Number(usage.output_tokens);
      state.usageSeen = true;
    }
    return;
  }

  // --- message_stop: finalize and emit response.completed. All other event
  // types (e.g. ping) are ignored.
  if (type === 'message_stop') {
    const completedResponse = buildCompletedResponsesObjectFromStreamState(state);
    state.completed = true;
    state.completedResponse = completedResponse;
    writeSseData(res, {
      type: 'response.completed',
      sequence_number: nextResponsesSequence(state),
      response: completedResponse
    });
  }
}
2370
+
2371
// Pipe a Claude SSE upstream response to the client as Responses-API SSE.
// Parses upstream SSE blocks, feeds each event through
// processClaudeResponsesSseEvent, and resolves with the completed
// Responses-API object (for usage logging) once the stream ends.
async function relayClaudeResponsesStream(upstreamResponse, res, fallbackModel = '') {
  setSseHeaders(res);
  const state = createClaudeResponsesStreamState(fallbackModel);
  const stream = createDecodedStream(upstreamResponse);

  return new Promise((resolve, reject) => {
    let buffer = '';
    let settled = false;

    // Guard against double settlement from multiple error/end callbacks.
    const safeResolve = (value) => {
      if (settled) return;
      settled = true;
      resolve(value);
    };

    const safeReject = (error) => {
      if (settled) return;
      settled = true;
      reject(error);
    };

    // Extract the `data:` payload from one SSE block and dispatch it.
    // Non-JSON payloads and [DONE] sentinels are silently ignored.
    const processSseBlock = (block) => {
      if (!block || !block.trim()) return;
      const dataLines = block
        .split('\n')
        .map(line => line.trimEnd())
        .filter(line => line.trim().startsWith('data:'))
        .map(line => line.replace(/^data:\s?/, ''));
      if (dataLines.length === 0) return;
      const payload = dataLines.join('\n').trim();
      if (!payload || payload === '[DONE]') return;

      let parsed;
      try {
        parsed = JSON.parse(payload);
      } catch {
        return;
      }
      processClaudeResponsesSseEvent(parsed, state, res);
    };

    // Buffer chunks and split on blank lines (SSE event boundaries);
    // CRLF is normalized to LF first.
    stream.on('data', (chunk) => {
      buffer += chunk.toString('utf8').replace(/\r\n/g, '\n');
      let separatorIndex = buffer.indexOf('\n\n');
      while (separatorIndex >= 0) {
        const block = buffer.slice(0, separatorIndex);
        buffer = buffer.slice(separatorIndex + 2);
        processSseBlock(block);
        separatorIndex = buffer.indexOf('\n\n');
      }
    });

    stream.on('end', () => {
      // Flush any trailing partial block.
      if (buffer.trim()) {
        processSseBlock(buffer);
      }

      // If upstream never sent message_stop, synthesize response.completed
      // from whatever was accumulated.
      if (!state.completed) {
        const completedResponse = buildCompletedResponsesObjectFromStreamState(state);
        state.completed = true;
        state.completedResponse = completedResponse;
        writeSseData(res, {
          type: 'response.completed',
          sequence_number: nextResponsesSequence(state),
          response: completedResponse
        });
      }

      if (!res.writableEnded) {
        writeSseDone(res);
        res.end();
      }

      safeResolve(state.completedResponse || buildCompletedResponsesObjectFromStreamState(state));
    });

    // Decode errors: surface as an SSE error event, then reject.
    stream.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Claude stream decode error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });

    // Raw upstream socket errors are handled the same way.
    upstreamResponse.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Claude stream upstream error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });
  });
}
2476
+
2477
function buildProxyAbsoluteTargetUrl(baseUrl = '', requestPath = '') {
  // Join the resolved upstream origin with the request path.
  // Returns '' when no usable http(s) target can be resolved.
  const resolved = resolveOpenCodeTarget(baseUrl, requestPath);
  const target = String(resolved || '').trim().replace(/\/+$/, '');
  if (!target) return '';
  if (!/^https?:\/\//i.test(target)) return '';
  let path = String(requestPath || '').trim();
  if (!path.startsWith('/')) {
    path = `/${path}`;
  }
  return target + path;
}
2484
+
2485
function patchCodexResponsesInstructionsEvent(parsed, originalPayload = {}) {
  // On response lifecycle events, restore the caller's original
  // `instructions` string (the upstream may have rewritten it). Returns the
  // event unchanged when not applicable; never mutates the input.
  if (!parsed || typeof parsed !== 'object') return parsed;
  const lifecycleTypes = ['response.created', 'response.in_progress', 'response.completed'];
  if (!lifecycleTypes.includes(parsed.type)) return parsed;
  const response = parsed.response;
  if (!response || typeof response !== 'object') return parsed;
  if (!Object.prototype.hasOwnProperty.call(response, 'instructions')) return parsed;
  if (typeof originalPayload.instructions !== 'string') return parsed;
  const patchedResponse = {
    ...response,
    instructions: originalPayload.instructions
  };
  return {
    ...parsed,
    response: patchedResponse
  };
}
2503
+
2504
// Relay a Codex /responses SSE upstream to the client nearly verbatim,
// patching `instructions` back to the caller's original on lifecycle events.
// Resolves with the `response.completed` payload (or null) for logging.
async function relayCodexResponsesStream(upstreamResponse, res, originalPayload = {}) {
  setSseHeaders(res);
  const stream = createDecodedStream(upstreamResponse);

  return new Promise((resolve, reject) => {
    let buffer = '';
    let completedResponse = null;
    // Track whether [DONE] was already forwarded so it is written exactly once.
    let doneWritten = false;
    let settled = false;

    const safeResolve = (value) => {
      if (settled) return;
      settled = true;
      resolve(value);
    };

    const safeReject = (error) => {
      if (settled) return;
      settled = true;
      reject(error);
    };

    // Parse one SSE block, patch it, forward it, and remember the completed
    // response if present. Non-JSON payloads are dropped.
    const processSseBlock = (block) => {
      if (!block || !block.trim()) return;
      const dataLines = block
        .split('\n')
        .map(line => line.trimEnd())
        .filter(line => line.trim().startsWith('data:'))
        .map(line => line.replace(/^data:\s?/, ''));
      if (dataLines.length === 0) return;
      const payload = dataLines.join('\n').trim();
      if (!payload) return;

      if (payload === '[DONE]') {
        if (!doneWritten) {
          writeSseDone(res);
          doneWritten = true;
        }
        return;
      }

      let parsed;
      try {
        parsed = JSON.parse(payload);
      } catch {
        return;
      }
      const patched = patchCodexResponsesInstructionsEvent(parsed, originalPayload);
      if (patched?.type === 'response.completed' && patched?.response && typeof patched.response === 'object') {
        completedResponse = patched.response;
      }
      writeSseData(res, patched);
    };

    // Split the byte stream into SSE blocks on blank lines (CRLF normalized).
    stream.on('data', (chunk) => {
      buffer += chunk.toString('utf8').replace(/\r\n/g, '\n');
      let separatorIndex = buffer.indexOf('\n\n');
      while (separatorIndex >= 0) {
        const block = buffer.slice(0, separatorIndex);
        buffer = buffer.slice(separatorIndex + 2);
        processSseBlock(block);
        separatorIndex = buffer.indexOf('\n\n');
      }
    });

    stream.on('end', () => {
      // Flush a trailing partial block, then guarantee termination frames.
      if (buffer.trim()) {
        processSseBlock(buffer);
      }

      if (!doneWritten && !res.writableEnded) {
        writeSseDone(res);
      }
      if (!res.writableEnded) {
        res.end();
      }
      safeResolve(completedResponse);
    });

    // Decode errors: surface to the client as an SSE error event, then reject.
    stream.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Codex stream decode error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });

    // Raw upstream socket errors are handled the same way.
    upstreamResponse.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Codex stream upstream error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });
  });
}
2612
+
2613
// Consume a Codex upstream response without streaming to the client.
// First tries to extract the `response.completed` payload from SSE events;
// if none is found, falls back to parsing the whole body as plain JSON.
// Resolves with the response object, a parsed JSON object, or null.
async function collectCodexResponsesNonStream(upstreamResponse, originalPayload = {}) {
  const stream = createDecodedStream(upstreamResponse);

  return new Promise((resolve, reject) => {
    let buffer = '';
    // rawBuffer keeps the entire body for the non-SSE JSON fallback.
    let rawBuffer = '';
    let completedResponse = null;

    // Parse one SSE block and remember the completed response if present.
    const processSseBlock = (block) => {
      if (!block || !block.trim()) return;
      const dataLines = block
        .split('\n')
        .map(line => line.trimEnd())
        .filter(line => line.trim().startsWith('data:'))
        .map(line => line.replace(/^data:\s?/, ''));
      if (dataLines.length === 0) return;
      const payload = dataLines.join('\n').trim();
      if (!payload || payload === '[DONE]') return;

      let parsed;
      try {
        parsed = JSON.parse(payload);
      } catch {
        return;
      }
      const patched = patchCodexResponsesInstructionsEvent(parsed, originalPayload);
      if (patched?.type === 'response.completed' && patched?.response && typeof patched.response === 'object') {
        completedResponse = patched.response;
      }
    };

    stream.on('data', (chunk) => {
      const textChunk = chunk.toString('utf8').replace(/\r\n/g, '\n');
      rawBuffer += textChunk;
      buffer += textChunk;
      let separatorIndex = buffer.indexOf('\n\n');
      while (separatorIndex >= 0) {
        const block = buffer.slice(0, separatorIndex);
        buffer = buffer.slice(separatorIndex + 2);
        processSseBlock(block);
        separatorIndex = buffer.indexOf('\n\n');
      }
    });

    stream.on('end', () => {
      // Flush a trailing partial block.
      if (buffer.trim()) {
        processSseBlock(buffer);
      }
      if (completedResponse) {
        resolve(completedResponse);
        return;
      }

      // Fallback: the upstream may have returned a plain JSON body instead
      // of SSE; prefer its `response` field when present.
      const trimmedRaw = rawBuffer.trim();
      if (trimmedRaw) {
        try {
          const parsed = JSON.parse(trimmedRaw);
          if (parsed?.response && typeof parsed.response === 'object') {
            resolve(parsed.response);
            return;
          }
          if (parsed && typeof parsed === 'object') {
            resolve(parsed);
            return;
          }
        } catch {
          // ignore JSON fallback parse error
        }
      }
      resolve(null);
    });

    stream.on('error', reject);
    upstreamResponse.on('error', reject);
  });
}
2689
+
2690
/**
 * Proxy an OpenAI-style request (/responses or /chat/completions) to an
 * Anthropic Claude upstream, translating the payload in both directions.
 *
 * @param {object} req  Incoming request (expects parsed JSON on req.body).
 * @param {object} res  Response to write the translated result to.
 * @param {object} channel  Channel config (id/name, baseUrl, model).
 * @param {string} effectiveKey  API key forwarded to the upstream.
 * @returns {Promise<boolean>} false when the path is not handled by this
 *   gateway (caller should fall through); true once a response was written.
 */
async function handleClaudeGatewayRequest(req, res, channel, effectiveKey) {
  const pathname = getRequestPathname(req.url);
  if (!isResponsesPath(pathname) && !isChatCompletionsPath(pathname)) {
    return false;
  }

  // Only JSON POST bodies are accepted; reject everything else up front.
  if (!shouldParseJson(req)) {
    sendOpenAiStyleError(res, 400, 'Claude gateway only supports JSON POST payload');
    return true;
  }

  const requestId = `opencode-${Date.now()}-${Math.random()}`;
  const startTime = Date.now();
  const originalPayload = (req.body && typeof req.body === 'object') ? req.body : {};
  const wantsStream = !!originalPayload.stream;
  // True upstream streaming is used only for /responses; a streaming
  // /chat/completions request is fetched non-streaming and replayed as SSE below.
  const streamResponses = wantsStream && isResponsesPath(pathname);
  // Scope the client session id by channel so the same session id on two
  // different channels resolves to distinct Claude user ids.
  const sessionKey = extractSessionIdFromRequest(req, originalPayload);
  const sessionScope = normalizeSessionKeyValue(channel?.id || channel?.name || '');
  const scopedSessionKey = sessionKey && sessionScope
    ? `${sessionScope}::${sessionKey}`
    : sessionKey;
  const preferredUserId = normalizeSessionKeyValue(originalPayload?.metadata?.user_id);
  const sessionUserId = resolveClaudeUserIdBySession(scopedSessionKey, preferredUserId);
  const claudePayload = convertOpenCodePayloadToClaude(pathname, originalPayload, channel.model, {
    sessionUserId
  });
  claudePayload.stream = streamResponses;

  // Headers imitate the Claude Code CLI client (stainless SDK fingerprint)
  // so the upstream treats this request like a regular CLI session.
  const headers = {
    'x-api-key': effectiveKey,
    'authorization': `Bearer ${effectiveKey}`,
    'anthropic-version': '2023-06-01',
    'anthropic-beta': CLAUDE_CODE_BETA_HEADER,
    'anthropic-dangerous-direct-browser-access': 'true',
    'x-app': 'cli',
    'x-stainless-helper-method': 'stream',
    'x-stainless-retry-count': '0',
    'x-stainless-runtime-version': 'v24.3.0',
    'x-stainless-package-version': '0.74.0',
    'x-stainless-runtime': 'node',
    'x-stainless-lang': 'js',
    'x-stainless-arch': mapStainlessArch(),
    'x-stainless-os': mapStainlessOs(),
    'x-stainless-timeout': '600',
    'content-type': 'application/json',
    'accept': streamResponses ? 'text/event-stream' : 'application/json',
    'accept-encoding': 'gzip, deflate, br, zstd',
    'connection': 'keep-alive',
    'user-agent': CLAUDE_CODE_USER_AGENT
  };

  // --- Streaming /responses path: relay upstream SSE directly. ---
  if (streamResponses) {
    let streamUpstream;
    try {
      streamUpstream = await postJsonStream(buildClaudeTargetUrl(channel.baseUrl), headers, claudePayload, 120000);
    } catch (error) {
      recordFailure(channel.id, 'opencode', error);
      sendOpenAiStyleError(res, 502, `Claude gateway network error: ${error.message}`, 'proxy_error');
      return true;
    }

    const statusCode = Number(streamUpstream.statusCode) || 500;
    if (statusCode < 200 || statusCode >= 300) {
      // Non-2xx: drain the body (best effort) to surface the upstream message.
      let rawBody = '';
      try {
        rawBody = await collectHttpResponseBody(streamUpstream.response);
      } catch {
        rawBody = '';
      }

      let parsedError = null;
      try {
        parsedError = rawBody ? JSON.parse(rawBody) : null;
      } catch {
        parsedError = null;
      }
      const upstreamMessage = parsedError?.error?.message || parsedError?.message || rawBody || `HTTP ${statusCode}`;
      recordFailure(channel.id, 'opencode', new Error(String(upstreamMessage).slice(0, 200)));
      sendOpenAiStyleError(res, statusCode, String(upstreamMessage).slice(0, 1000), 'upstream_error');
      return true;
    }

    try {
      const streamedResponseObject = await relayClaudeResponsesStream(streamUpstream.response, res, originalPayload.model || '');
      publishOpenCodeUsageLog({
        requestId,
        channel,
        model: streamedResponseObject?.model || originalPayload.model || '',
        usage: streamedResponseObject?.usage || {},
        startTime
      });
      recordSuccess(channel.id, 'opencode');
    } catch (error) {
      recordFailure(channel.id, 'opencode', error);
      // The relay may fail after SSE has started; only send an error body
      // if headers have not gone out yet.
      if (!res.headersSent) {
        sendOpenAiStyleError(res, 502, `Claude stream relay error: ${error.message}`, 'proxy_error');
      }
    }
    return true;
  }

  // --- Non-streaming path: single JSON round trip. ---
  let upstream;
  try {
    upstream = await postJson(buildClaudeTargetUrl(channel.baseUrl), headers, claudePayload, 120000);
  } catch (error) {
    recordFailure(channel.id, 'opencode', error);
    sendOpenAiStyleError(res, 502, `Claude gateway network error: ${error.message}`, 'proxy_error');
    return true;
  }

  const statusCode = Number(upstream.statusCode) || 500;
  let parsedBody = null;
  try {
    parsedBody = upstream.rawBody ? JSON.parse(upstream.rawBody) : {};
  } catch {
    parsedBody = null;
  }

  if (statusCode < 200 || statusCode >= 300) {
    const upstreamMessage = parsedBody?.error?.message || parsedBody?.message || upstream.rawBody || `HTTP ${statusCode}`;
    recordFailure(channel.id, 'opencode', new Error(String(upstreamMessage).slice(0, 200)));
    sendOpenAiStyleError(res, statusCode, String(upstreamMessage).slice(0, 1000), 'upstream_error');
    return true;
  }

  if (!parsedBody || typeof parsedBody !== 'object') {
    recordFailure(channel.id, 'opencode', new Error('Invalid Claude gateway response'));
    sendOpenAiStyleError(res, 502, 'Invalid Claude gateway response', 'proxy_error');
    return true;
  }

  if (isResponsesPath(pathname)) {
    const responseObject = buildOpenAiResponsesObject(parsedBody, originalPayload.model);
    // Client asked for streaming but upstream was fetched whole: replay as SSE.
    if (wantsStream) {
      sendResponsesSse(res, responseObject);
    } else {
      res.json(responseObject);
    }
    publishOpenCodeUsageLog({
      requestId,
      channel,
      model: responseObject.model,
      usage: responseObject.usage,
      startTime
    });
    recordSuccess(channel.id, 'opencode');
    return true;
  }

  // /chat/completions shape.
  const chatResponseObject = buildOpenAiChatCompletionsObject(parsedBody, originalPayload.model);
  if (wantsStream) {
    sendChatCompletionsSse(res, chatResponseObject);
  } else {
    res.json(chatResponseObject);
  }
  publishOpenCodeUsageLog({
    requestId,
    channel,
    model: chatResponseObject.model,
    usage: chatResponseObject.usage,
    startTime
  });
  recordSuccess(channel.id, 'opencode');
  return true;
}
2855
+
2856
/**
 * Proxy an OpenAI /responses request to a Codex-style upstream. The upstream
 * call is always made in streaming (SSE) mode; when the client did not ask
 * for a stream, the SSE is collected into a single JSON response.
 *
 * @param {object} req  Incoming request (expects parsed JSON on req.body).
 * @param {object} res  Response the result is written to.
 * @param {object} channel  Channel config (id, baseUrl, model).
 * @param {string} effectiveKey  Bearer token for the upstream.
 * @returns {Promise<boolean>} false when the path is not /responses; true
 *   once a response has been written.
 */
async function handleCodexGatewayRequest(req, res, channel, effectiveKey) {
  const pathname = getRequestPathname(req.url);
  if (!isResponsesPath(pathname)) {
    return false;
  }

  if (!shouldParseJson(req)) {
    sendOpenAiStyleError(res, 400, 'Codex gateway only supports JSON POST payload');
    return true;
  }

  const requestId = `opencode-${Date.now()}-${Math.random()}`;
  const startTime = Date.now();
  const originalPayload = (req.body && typeof req.body === 'object') ? req.body : {};
  const wantsStream = !!originalPayload.stream;
  const converted = convertOpenCodePayloadToCodexResponses(originalPayload, channel.model);
  const targetModel = converted.model;

  // Model must come from either the request or the channel configuration.
  if (!targetModel) {
    sendOpenAiStyleError(res, 400, 'Missing model in request and channel configuration');
    return true;
  }

  const targetUrl = buildCodexTargetUrl(channel.baseUrl);
  if (!targetUrl) {
    sendOpenAiStyleError(res, 400, 'Failed to build Codex target URL');
    return true;
  }

  // Reuse the client's prompt_cache_key when present, otherwise mint a
  // per-request id; the same value doubles as session/conversation id below.
  const codexSessionId = `${Date.now()}-${Math.random().toString(36).slice(2, 15)}`;
  const promptCacheKey = (typeof converted.requestBody.prompt_cache_key === 'string' && converted.requestBody.prompt_cache_key.trim())
    ? converted.requestBody.prompt_cache_key.trim()
    : codexSessionId;
  converted.requestBody.prompt_cache_key = promptCacheKey;

  // Headers imitate the codex_cli_rs client.
  const headers = {
    authorization: `Bearer ${effectiveKey}`,
    'openai-beta': 'responses=experimental',
    accept: 'text/event-stream',
    'accept-encoding': 'gzip, deflate, br',
    connection: 'Keep-Alive',
    'content-type': 'application/json',
    Version: CODEX_CLI_VERSION,
    Session_id: promptCacheKey,
    Conversation_id: promptCacheKey,
    Originator: 'codex_cli_rs',
    'user-agent': CODEX_CLI_USER_AGENT
  };

  let streamUpstream;
  try {
    streamUpstream = await postJsonStream(targetUrl, headers, converted.requestBody, 120000);
  } catch (error) {
    recordFailure(channel.id, 'opencode', error);
    sendOpenAiStyleError(res, 502, `Codex gateway network error: ${error.message}`, 'proxy_error');
    return true;
  }

  const statusCode = Number(streamUpstream.statusCode) || 500;
  if (statusCode < 200 || statusCode >= 300) {
    // Non-2xx: drain the body (best effort) to surface the upstream message.
    let rawBody = '';
    try {
      rawBody = await collectHttpResponseBody(streamUpstream.response);
    } catch {
      rawBody = '';
    }

    let parsedError = null;
    try {
      parsedError = rawBody ? JSON.parse(rawBody) : null;
    } catch {
      parsedError = null;
    }

    const upstreamMessage = parsedError?.error?.message || parsedError?.message || rawBody || `HTTP ${statusCode}`;
    recordFailure(channel.id, 'opencode', new Error(String(upstreamMessage).slice(0, 200)));
    sendOpenAiStyleError(res, statusCode, String(upstreamMessage).slice(0, 1000), 'upstream_error');
    return true;
  }

  try {
    if (wantsStream) {
      // Relay the upstream SSE events straight through to the client.
      const completedResponse = await relayCodexResponsesStream(streamUpstream.response, res, originalPayload);
      publishOpenCodeUsageLog({
        requestId,
        channel,
        model: completedResponse?.model || targetModel,
        usage: completedResponse?.usage || {},
        startTime
      });
      recordSuccess(channel.id, 'opencode');
      return true;
    }

    // Client wants a plain JSON body: collect the SSE into one object.
    const responseObject = await collectCodexResponsesNonStream(streamUpstream.response, originalPayload);
    if (!responseObject || typeof responseObject !== 'object') {
      recordFailure(channel.id, 'opencode', new Error('Invalid Codex gateway response'));
      sendOpenAiStyleError(res, 502, 'Invalid Codex gateway response', 'proxy_error');
      return true;
    }
    res.json(responseObject);
    publishOpenCodeUsageLog({
      requestId,
      channel,
      model: responseObject.model || targetModel,
      usage: responseObject.usage || {},
      startTime
    });
    recordSuccess(channel.id, 'opencode');
    return true;
  } catch (error) {
    recordFailure(channel.id, 'opencode', error);
    // Relay may fail mid-stream; only write an error body before headers go out.
    if (!res.headersSent) {
      sendOpenAiStyleError(res, 502, `Codex stream relay error: ${error.message}`, 'proxy_error');
    }
    return true;
  }
}
2974
+
2975
/**
 * Build a fresh accumulator used while translating one Gemini SSE stream
 * into OpenAI "responses" events. Scalar counters track usage tokens; the
 * Map fields key per-output-index bookkeeping (item kind, ids, buffered
 * text / function-call arguments).
 *
 * @param {string} [fallbackModel] Model name used until the stream reports one.
 * @returns {object} Mutable per-request stream state.
 */
function createGeminiResponsesStreamState(fallbackModel = '') {
  const state = {
    sequence: 0,
    responseId: `resp_${Date.now()}_${Math.random().toString(36).slice(2, 10)}`,
    createdAt: Math.floor(Date.now() / 1000),
    model: fallbackModel || '',
    inputTokens: 0,
    outputTokens: 0,
    totalTokens: 0,
    cachedTokens: 0,
    reasoningTokens: 0,
    usageSeen: false,
    started: false,
    completed: false,
    completedResponse: null
  };

  // One Map per bookkeeping concern, keyed by candidate part index.
  const mapFields = [
    'itemTypeByIndex',
    'messageIdByIndex',
    'messageTextByIndex',
    'functionCallIdByIndex',
    'functionNameByIndex',
    'functionArgsByIndex',
    'reasoningIdByIndex',
    'reasoningTextByIndex'
  ];
  for (const field of mapFields) {
    state[field] = new Map();
  }

  return state;
}
3000
+
3001
/**
 * Emit the opening `response.created` / `response.in_progress` SSE pair
 * exactly once per stream; subsequent calls are no-ops.
 *
 * @param {object} state  Stream state from createGeminiResponsesStreamState.
 * @param {object} res    Response the SSE events are written to.
 */
function ensureGeminiResponsesStarted(state, res) {
  if (state.started) return;
  state.started = true;

  // Both opening events carry the same in-progress response snapshot.
  const snapshot = {
    id: state.responseId,
    object: 'response',
    created_at: state.createdAt,
    model: state.model,
    status: 'in_progress'
  };

  for (const type of ['response.created', 'response.in_progress']) {
    writeSseData(res, {
      type,
      sequence_number: nextResponsesSequence(state),
      response: snapshot
    });
  }
}
3025
+
3026
/**
 * Fold a streamed text fragment into the accumulated text. Gemini may resend
 * the full cumulative string or only the new tail, so when the incoming value
 * already contains what we have, prefer it; otherwise append.
 *
 * @param {string} previousValue  Text accumulated so far (non-strings -> '').
 * @param {string} incomingValue  Newly received fragment (non-strings -> '').
 * @returns {string} Merged text.
 */
function mergeGeminiStreamText(previousValue, incomingValue) {
  const accumulated = typeof previousValue === 'string' ? previousValue : '';
  const fragment = typeof incomingValue === 'string' ? incomingValue : '';
  if (fragment === '') return accumulated;
  if (accumulated === '') return fragment;
  return fragment.startsWith(accumulated) ? fragment : accumulated + fragment;
}
3034
+
3035
/**
 * Fold a streamed function-call arguments fragment into the accumulated
 * arguments string. Handles cumulative resends in either direction, and
 * treats a self-contained `{...}` payload as a full replacement rather than
 * something to append.
 *
 * @param {string} previousValue  Arguments accumulated so far (non-strings -> '').
 * @param {string} incomingValue  Newly received fragment (non-strings -> '').
 * @returns {string} Merged arguments string.
 */
function mergeGeminiStreamArguments(previousValue, incomingValue) {
  const have = typeof previousValue === 'string' ? previousValue : '';
  const next = typeof incomingValue === 'string' ? incomingValue : '';
  if (!next) return have;
  if (!have) return next;
  // Cumulative resend: keep whichever string is the prefix-superset.
  if (next.startsWith(have)) return next;
  if (have.startsWith(next)) return have;

  // A complete JSON object replaces whatever partial fragment we held.
  const stripped = next.trim();
  if (stripped.startsWith('{') && stripped.endsWith('}')) {
    return next;
  }
  return have + next;
}
3049
+
3050
/**
 * Compute the newly appended suffix of `next` relative to `previous`, for
 * emitting SSE delta events. When `next` does not extend `previous` (a full
 * replacement), the whole new value is the delta.
 *
 * @param {string} previousValue  Previously emitted cumulative text.
 * @param {string} nextValue      New cumulative text.
 * @returns {string} The delta to emit; '' when nothing changed.
 */
function computeIncrementalDelta(previousValue, nextValue) {
  const before = typeof previousValue === 'string' ? previousValue : '';
  const after = typeof nextValue === 'string' ? nextValue : '';
  if (after === '' || after === before) return '';
  if (before === '') return after;
  return after.startsWith(before) ? after.slice(before.length) : after;
}
3058
+
3059
/**
 * Copy token counters from a Gemini chunk's `usageMetadata` onto the stream
 * state. Later chunks simply overwrite earlier values; any finite counter
 * marks `usageSeen`. When no explicit total is given but usage has been
 * seen, the total is derived as input + output.
 *
 * @param {object} parsed  A parsed Gemini stream chunk (may lack usageMetadata).
 * @param {object} state   Stream state whose token fields are updated in place.
 */
function applyGeminiUsageMetadataToStreamState(parsed, state) {
  const meta = (parsed?.usageMetadata && typeof parsed.usageMetadata === 'object')
    ? parsed.usageMetadata
    : null;
  if (!meta) return;

  // Store `raw` into state[field] when it coerces to a finite number;
  // report whether the assignment happened.
  const take = (raw, field) => {
    const value = Number(raw);
    if (!Number.isFinite(value)) return false;
    state[field] = value;
    state.usageSeen = true;
    return true;
  };

  take(meta.promptTokenCount, 'inputTokens');
  take(meta.candidatesTokenCount, 'outputTokens');
  if (!take(meta.totalTokenCount, 'totalTokens') && state.usageSeen) {
    state.totalTokens = state.inputTokens + state.outputTokens;
  }
  take(meta.cachedContentTokenCount, 'cachedTokens');
  take(meta.thoughtsTokenCount, 'reasoningTokens');
}
3088
+
3089
/**
 * Translate one parsed Gemini stream chunk into OpenAI "responses" SSE
 * events written to `res`, updating the stream state as it goes. Each part
 * index of the first candidate is pinned to one item kind (function_call /
 * reasoning / message) on first sight; later chunks for that index of a
 * different kind are ignored. "added" events are emitted the first time an
 * index is seen; delta events carry only newly appended text/arguments.
 *
 * @param {object} parsed  Parsed chunk from the Gemini SSE stream.
 * @param {object} state   Stream state (see createGeminiResponsesStreamState).
 * @param {object} res     Response the SSE events are written to.
 */
function processGeminiResponsesSseEvent(parsed, state, res) {
  if (!parsed || typeof parsed !== 'object') return;
  // Adopt the concrete model name reported by the stream, if any.
  if (typeof parsed.modelVersion === 'string' && parsed.modelVersion.trim()) {
    state.model = parsed.modelVersion;
  }
  ensureGeminiResponsesStarted(state, res);
  applyGeminiUsageMetadataToStreamState(parsed, state);

  // Only the first well-formed candidate is relayed.
  const firstCandidate = Array.isArray(parsed.candidates)
    ? parsed.candidates.find(candidate => candidate && typeof candidate === 'object')
    : null;
  if (!firstCandidate) return;

  const parts = Array.isArray(firstCandidate.content?.parts) ? firstCandidate.content.parts : [];
  parts.forEach((part, index) => {
    if (!part || typeof part !== 'object') return;

    // --- function_call part ---
    if (part.functionCall && typeof part.functionCall === 'object') {
      const existingType = state.itemTypeByIndex.get(index);
      if (existingType && existingType !== 'function_call') return;
      state.itemTypeByIndex.set(index, 'function_call');

      const functionCall = part.functionCall;
      // Stable call id: previously assigned > upstream id/callId > synthetic.
      const callId = String(
        state.functionCallIdByIndex.get(index)
        || functionCall.id
        || functionCall.callId
        || `call_${index + 1}`
      );
      const name = typeof functionCall.name === 'string'
        ? functionCall.name
        : (state.functionNameByIndex.get(index) || '');
      const argsObject = (functionCall.args && typeof functionCall.args === 'object' && !Array.isArray(functionCall.args))
        ? functionCall.args
        : {};
      const argsString = JSON.stringify(argsObject);
      const previousArgs = state.functionArgsByIndex.get(index) || '';
      const mergedArgs = mergeGeminiStreamArguments(previousArgs, argsString);
      const delta = computeIncrementalDelta(previousArgs, mergedArgs);

      // First sighting of this index: announce the item.
      if (!state.functionCallIdByIndex.has(index)) {
        writeSseData(res, {
          type: 'response.output_item.added',
          sequence_number: nextResponsesSequence(state),
          output_index: index,
          item: {
            id: `fc_${callId}`,
            type: 'function_call',
            status: 'in_progress',
            arguments: '',
            call_id: callId,
            name
          }
        });
      }

      if (delta) {
        writeSseData(res, {
          type: 'response.function_call_arguments.delta',
          sequence_number: nextResponsesSequence(state),
          item_id: `fc_${callId}`,
          output_index: index,
          delta
        });
      }

      state.functionCallIdByIndex.set(index, callId);
      state.functionNameByIndex.set(index, name);
      state.functionArgsByIndex.set(index, mergedArgs);
      return;
    }

    // Remaining kinds all require non-empty text.
    if (typeof part.text !== 'string' || !part.text) {
      return;
    }

    // --- reasoning ("thought") part -> reasoning summary events ---
    if (part.thought === true) {
      const existingType = state.itemTypeByIndex.get(index);
      if (existingType && existingType !== 'reasoning') return;
      state.itemTypeByIndex.set(index, 'reasoning');

      const reasoningId = state.reasoningIdByIndex.get(index) || `rs_${state.responseId}_${index}`;
      const previousText = state.reasoningTextByIndex.get(index) || '';
      const mergedText = mergeGeminiStreamText(previousText, part.text);
      const delta = computeIncrementalDelta(previousText, mergedText);

      // First sighting: announce the reasoning item and its summary part.
      if (!state.reasoningIdByIndex.has(index)) {
        writeSseData(res, {
          type: 'response.output_item.added',
          sequence_number: nextResponsesSequence(state),
          output_index: index,
          item: {
            id: reasoningId,
            type: 'reasoning',
            status: 'in_progress',
            summary: []
          }
        });

        writeSseData(res, {
          type: 'response.reasoning_summary_part.added',
          sequence_number: nextResponsesSequence(state),
          item_id: reasoningId,
          output_index: index,
          summary_index: 0,
          part: {
            type: 'summary_text',
            text: ''
          }
        });
      }

      if (delta) {
        writeSseData(res, {
          type: 'response.reasoning_summary_text.delta',
          sequence_number: nextResponsesSequence(state),
          item_id: reasoningId,
          output_index: index,
          summary_index: 0,
          delta
        });
      }

      state.reasoningIdByIndex.set(index, reasoningId);
      state.reasoningTextByIndex.set(index, mergedText);
      return;
    }

    // --- plain text part -> assistant message events ---
    const existingType = state.itemTypeByIndex.get(index);
    if (existingType && existingType !== 'message') return;
    state.itemTypeByIndex.set(index, 'message');

    const messageId = state.messageIdByIndex.get(index) || `msg_${state.responseId}_${index}`;
    const previousText = state.messageTextByIndex.get(index) || '';
    const mergedText = mergeGeminiStreamText(previousText, part.text);
    const delta = computeIncrementalDelta(previousText, mergedText);

    // First sighting: announce the message item and its text content part.
    if (!state.messageIdByIndex.has(index)) {
      writeSseData(res, {
        type: 'response.output_item.added',
        sequence_number: nextResponsesSequence(state),
        output_index: index,
        item: {
          id: messageId,
          type: 'message',
          status: 'in_progress',
          role: 'assistant',
          content: []
        }
      });

      writeSseData(res, {
        type: 'response.content_part.added',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: index,
        content_index: 0,
        part: {
          type: 'output_text',
          text: '',
          annotations: [],
          logprobs: []
        }
      });
    }

    if (delta) {
      writeSseData(res, {
        type: 'response.output_text.delta',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: index,
        content_index: 0,
        delta,
        logprobs: []
      });
    }

    state.messageIdByIndex.set(index, messageId);
    state.messageTextByIndex.set(index, mergedText);
  });
}
3271
+
3272
/**
 * Assemble the final OpenAI "responses" object from accumulated Gemini
 * stream state: one completed output item per tracked index (message /
 * reasoning / function_call, in ascending index order), an empty message
 * when nothing was produced, and a usage block when any token data was
 * seen. Reasoning tokens fall back to a rough length/4 estimate when the
 * upstream did not report them.
 *
 * @param {object} state  Stream state (see createGeminiResponsesStreamState).
 * @returns {object} Completed response object.
 */
function buildCompletedResponsesObjectFromGeminiStreamState(state) {
  const output = [];
  sortedNumericKeys(state.itemTypeByIndex).forEach(index => {
    const itemType = state.itemTypeByIndex.get(index);
    if (itemType === 'message') {
      const messageId = state.messageIdByIndex.get(index) || `msg_${state.responseId}_${index}`;
      const text = state.messageTextByIndex.get(index) || '';
      output.push({
        id: messageId,
        type: 'message',
        status: 'completed',
        role: 'assistant',
        content: [
          {
            type: 'output_text',
            text,
            annotations: []
          }
        ]
      });
      return;
    }

    if (itemType === 'reasoning') {
      const reasoningId = state.reasoningIdByIndex.get(index) || `rs_${state.responseId}_${index}`;
      const text = state.reasoningTextByIndex.get(index) || '';
      output.push({
        id: reasoningId,
        type: 'reasoning',
        summary: [
          {
            type: 'summary_text',
            text
          }
        ]
      });
      return;
    }

    if (itemType === 'function_call') {
      const callId = state.functionCallIdByIndex.get(index) || `call_${index + 1}`;
      const name = state.functionNameByIndex.get(index) || '';
      const args = normalizeFunctionArgumentsString(state.functionArgsByIndex.get(index));
      output.push({
        id: `fc_${callId}`,
        type: 'function_call',
        status: 'completed',
        arguments: args,
        call_id: callId,
        name
      });
    }
  });

  // Never emit an empty output array: fall back to an empty assistant message.
  if (output.length === 0) {
    output.push({
      id: `msg_${state.responseId}_0`,
      type: 'message',
      status: 'completed',
      role: 'assistant',
      content: [
        {
          type: 'output_text',
          text: '',
          annotations: []
        }
      ]
    });
  }

  // Rough reasoning-token estimate (~4 chars/token) used only when the
  // upstream did not report thoughtsTokenCount.
  const estimatedReasoningTokens = sortedNumericKeys(state.reasoningTextByIndex)
    .map(index => state.reasoningTextByIndex.get(index) || '')
    .reduce((acc, text) => acc + Math.floor(text.length / 4), 0);
  const reasoningTokens = state.reasoningTokens > 0 ? state.reasoningTokens : estimatedReasoningTokens;
  const totalTokens = state.totalTokens > 0
    ? state.totalTokens
    : Number(state.inputTokens || 0) + Number(state.outputTokens || 0);

  const response = {
    id: state.responseId,
    object: 'response',
    created_at: state.createdAt,
    status: 'completed',
    model: state.model || '',
    output
  };

  // Attach usage only when there is any token evidence to report.
  if (state.usageSeen || totalTokens > 0 || state.cachedTokens > 0 || reasoningTokens > 0) {
    response.usage = {
      input_tokens: Number(state.inputTokens || 0),
      output_tokens: Number(state.outputTokens || 0),
      total_tokens: totalTokens
    };
    if (state.cachedTokens > 0) {
      response.usage.input_tokens_details = {
        cached_tokens: Number(state.cachedTokens || 0)
      };
    }
    if (reasoningTokens > 0) {
      response.usage.output_tokens_details = {
        reasoning_tokens: Number(reasoningTokens || 0)
      };
    }
  }

  return response;
}
3379
+
3380
/**
 * Close out the translated stream: for every tracked output item emit its
 * "...done" SSE events (text/content/item for messages, summary/item for
 * reasoning, arguments/item for function calls), then emit the final
 * `response.completed` event. Idempotent — a second call returns the cached
 * completed response without re-emitting anything.
 *
 * @param {object} state  Stream state (see createGeminiResponsesStreamState).
 * @param {object} res    Response the SSE events are written to.
 * @returns {object} The completed response object.
 */
function finalizeGeminiResponsesStream(state, res) {
  if (state.completed) {
    return state.completedResponse || buildCompletedResponsesObjectFromGeminiStreamState(state);
  }

  // Guarantee the opening events exist even for an empty stream.
  ensureGeminiResponsesStarted(state, res);
  sortedNumericKeys(state.itemTypeByIndex).forEach(index => {
    const itemType = state.itemTypeByIndex.get(index);
    if (itemType === 'message') {
      const messageId = state.messageIdByIndex.get(index) || `msg_${state.responseId}_${index}`;
      const text = state.messageTextByIndex.get(index) || '';
      writeSseData(res, {
        type: 'response.output_text.done',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: index,
        content_index: 0,
        text,
        logprobs: []
      });

      writeSseData(res, {
        type: 'response.content_part.done',
        sequence_number: nextResponsesSequence(state),
        item_id: messageId,
        output_index: index,
        content_index: 0,
        part: {
          type: 'output_text',
          text,
          annotations: [],
          logprobs: []
        }
      });

      writeSseData(res, {
        type: 'response.output_item.done',
        sequence_number: nextResponsesSequence(state),
        output_index: index,
        item: {
          id: messageId,
          type: 'message',
          status: 'completed',
          role: 'assistant',
          content: [
            {
              type: 'output_text',
              text,
              annotations: []
            }
          ]
        }
      });
      return;
    }

    if (itemType === 'reasoning') {
      const reasoningId = state.reasoningIdByIndex.get(index) || `rs_${state.responseId}_${index}`;
      const text = state.reasoningTextByIndex.get(index) || '';

      writeSseData(res, {
        type: 'response.reasoning_summary_text.done',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: index,
        summary_index: 0,
        text
      });

      writeSseData(res, {
        type: 'response.reasoning_summary_part.done',
        sequence_number: nextResponsesSequence(state),
        item_id: reasoningId,
        output_index: index,
        summary_index: 0,
        part: {
          type: 'summary_text',
          text
        }
      });

      writeSseData(res, {
        type: 'response.output_item.done',
        sequence_number: nextResponsesSequence(state),
        output_index: index,
        item: {
          id: reasoningId,
          type: 'reasoning',
          status: 'completed',
          summary: [
            {
              type: 'summary_text',
              text
            }
          ]
        }
      });
      return;
    }

    if (itemType === 'function_call') {
      const callId = state.functionCallIdByIndex.get(index) || `call_${index + 1}`;
      const name = state.functionNameByIndex.get(index) || '';
      const args = normalizeFunctionArgumentsString(state.functionArgsByIndex.get(index));

      writeSseData(res, {
        type: 'response.function_call_arguments.done',
        sequence_number: nextResponsesSequence(state),
        item_id: `fc_${callId}`,
        output_index: index,
        arguments: args
      });

      writeSseData(res, {
        type: 'response.output_item.done',
        sequence_number: nextResponsesSequence(state),
        output_index: index,
        item: {
          id: `fc_${callId}`,
          type: 'function_call',
          status: 'completed',
          arguments: args,
          call_id: callId,
          name
        }
      });
    }
  });

  // Cache the completed object so repeated finalization is a no-op.
  const completedResponse = buildCompletedResponsesObjectFromGeminiStreamState(state);
  state.completed = true;
  state.completedResponse = completedResponse;
  writeSseData(res, {
    type: 'response.completed',
    sequence_number: nextResponsesSequence(state),
    response: completedResponse
  });
  return completedResponse;
}
3519
+
3520
/**
 * Pump a Gemini SSE upstream response through the translator and out to the
 * client as OpenAI "responses" SSE. Splits the (decoded) byte stream into
 * blank-line-delimited SSE blocks, parses each `data:` payload as JSON, and
 * feeds it to processGeminiResponsesSseEvent. On end (or `[DONE]`) the
 * stream is finalized and the promise resolves with the completed response
 * object; decode/upstream errors emit an SSE error event and reject.
 *
 * @param {object} upstreamResponse  Raw upstream HTTP response.
 * @param {object} res               Client response for the translated SSE.
 * @param {string} [fallbackModel]   Model name used until the stream reports one.
 * @returns {Promise<object>} The completed response object.
 */
async function relayGeminiResponsesStream(upstreamResponse, res, fallbackModel = '') {
  setSseHeaders(res);
  const state = createGeminiResponsesStreamState(fallbackModel);
  const stream = createDecodedStream(upstreamResponse);

  return new Promise((resolve, reject) => {
    let buffer = '';
    let settled = false;

    // Resolve/reject at most once — multiple error/end paths can fire.
    const safeResolve = (value) => {
      if (settled) return;
      settled = true;
      resolve(value);
    };

    const safeReject = (error) => {
      if (settled) return;
      settled = true;
      reject(error);
    };

    // Extract the `data:` payload(s) from one SSE block and dispatch them.
    const processSseBlock = (block) => {
      if (!block || !block.trim()) return;
      const dataLines = block
        .split('\n')
        .map(line => line.trimEnd())
        .filter(line => line.trim().startsWith('data:'))
        .map(line => line.replace(/^data:\s?/, ''));
      if (dataLines.length === 0) return;
      const payload = dataLines.join('\n').trim();
      if (!payload) return;

      if (payload === '[DONE]') {
        finalizeGeminiResponsesStream(state, res);
        return;
      }

      let parsed;
      try {
        parsed = JSON.parse(payload);
      } catch {
        // Malformed JSON chunks are silently skipped.
        return;
      }

      // Some servers batch several chunks into one JSON array.
      if (Array.isArray(parsed)) {
        parsed.forEach(item => processGeminiResponsesSseEvent(item, state, res));
        return;
      }
      processGeminiResponsesSseEvent(parsed, state, res);
    };

    stream.on('data', (chunk) => {
      // Normalize CRLF, then peel off every complete (\n\n-terminated) block.
      buffer += chunk.toString('utf8').replace(/\r\n/g, '\n');
      let separatorIndex = buffer.indexOf('\n\n');
      while (separatorIndex >= 0) {
        const block = buffer.slice(0, separatorIndex);
        buffer = buffer.slice(separatorIndex + 2);
        processSseBlock(block);
        separatorIndex = buffer.indexOf('\n\n');
      }
    });

    stream.on('end', () => {
      // Flush any trailing block that had no terminating blank line.
      if (buffer.trim()) {
        processSseBlock(buffer);
      }

      if (!state.completed) {
        finalizeGeminiResponsesStream(state, res);
      }

      if (!res.writableEnded) {
        writeSseDone(res);
        res.end();
      }

      safeResolve(state.completedResponse || buildCompletedResponsesObjectFromGeminiStreamState(state));
    });

    stream.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Gemini stream decode error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });

    upstreamResponse.on('error', (error) => {
      if (!res.writableEnded) {
        writeSseData(res, {
          type: 'error',
          error: {
            message: `Gemini stream upstream error: ${error.message || String(error)}`
          }
        });
        writeSseDone(res);
        res.end();
      }
      safeReject(error);
    });
  });
}
3628
+
3629
+ async function handleGeminiGatewayRequest(req, res, channel, effectiveKey) {
3630
+ const pathname = getRequestPathname(req.url);
3631
+ if (!isResponsesPath(pathname) && !isChatCompletionsPath(pathname)) {
3632
+ return false;
3633
+ }
3634
+
3635
+ if (!shouldParseJson(req)) {
3636
+ sendOpenAiStyleError(res, 400, 'Gemini gateway only supports JSON POST payload');
3637
+ return true;
3638
+ }
3639
+
3640
+ const requestId = `opencode-${Date.now()}-${Math.random()}`;
3641
+ const startTime = Date.now();
3642
+ const originalPayload = (req.body && typeof req.body === 'object') ? req.body : {};
3643
+ const wantsStream = !!originalPayload.stream;
3644
+ const streamResponses = wantsStream && isResponsesPath(pathname);
3645
+ const converted = convertOpenCodePayloadToGemini(pathname, originalPayload, channel.model);
3646
+ const targetModel = converted.model;
3647
+ const useGeminiCli = shouldUseGeminiCliFormat(channel.baseUrl);
3648
+
3649
+ if (!targetModel) {
3650
+ sendOpenAiStyleError(res, 400, 'Missing model in request and channel configuration');
3651
+ return true;
3652
+ }
3653
+
3654
+ const targetUrl = buildGeminiTargetUrl(channel.baseUrl, targetModel, effectiveKey, {
3655
+ stream: streamResponses,
3656
+ useCli: useGeminiCli
3657
+ });
3658
+ if (!targetUrl) {
3659
+ sendOpenAiStyleError(res, 400, 'Failed to build Gemini target URL');
3660
+ return true;
3661
+ }
3662
+
3663
+ const geminiPayload = useGeminiCli
3664
+ ? {
3665
+ project: '',
3666
+ model: targetModel,
3667
+ request: converted.requestBody
3668
+ }
3669
+ : converted.requestBody;
3670
+
3671
+ const headers = useGeminiCli
3672
+ ? {
3673
+ 'x-goog-api-key': effectiveKey,
3674
+ 'authorization': `Bearer ${effectiveKey}`,
3675
+ 'content-type': 'application/json',
3676
+ 'accept': streamResponses ? 'text/event-stream' : 'application/json',
3677
+ 'accept-encoding': 'gzip, deflate, br',
3678
+ 'user-agent': GEMINI_CLI_USER_AGENT,
3679
+ 'x-goog-api-client': GEMINI_CLI_API_CLIENT,
3680
+ 'client-metadata': GEMINI_CLI_CLIENT_METADATA
3681
+ }
3682
+ : {
3683
+ 'x-goog-api-key': effectiveKey,
3684
+ 'authorization': `Bearer ${effectiveKey}`,
3685
+ 'content-type': 'application/json',
3686
+ 'accept': streamResponses ? 'text/event-stream' : 'application/json',
3687
+ 'accept-encoding': 'gzip, deflate, br',
3688
+ 'user-agent': 'google-genai-sdk/0.8.0'
3689
+ };
3690
+
3691
+ if (streamResponses) {
3692
+ let streamUpstream;
3693
+ try {
3694
+ streamUpstream = await postJsonStream(targetUrl, headers, geminiPayload, 120000);
3695
+ } catch (error) {
3696
+ recordFailure(channel.id, 'opencode', error);
3697
+ sendOpenAiStyleError(res, 502, `Gemini gateway network error: ${error.message}`, 'proxy_error');
3698
+ return true;
3699
+ }
3700
+
3701
+ const statusCode = Number(streamUpstream.statusCode) || 500;
3702
+ if (statusCode < 200 || statusCode >= 300) {
3703
+ let rawBody = '';
3704
+ try {
3705
+ rawBody = await collectHttpResponseBody(streamUpstream.response);
3706
+ } catch {
3707
+ rawBody = '';
3708
+ }
3709
+
3710
+ let parsedError = null;
3711
+ try {
3712
+ parsedError = rawBody ? JSON.parse(rawBody) : null;
3713
+ } catch {
3714
+ parsedError = null;
3715
+ }
3716
+ const upstreamMessage = parsedError?.error?.message || parsedError?.message || rawBody || `HTTP ${statusCode}`;
3717
+ recordFailure(channel.id, 'opencode', new Error(String(upstreamMessage).slice(0, 200)));
3718
+ sendOpenAiStyleError(res, statusCode, String(upstreamMessage).slice(0, 1000), 'upstream_error');
3719
+ return true;
3720
+ }
3721
+
3722
+ try {
3723
+ const streamedResponseObject = await relayGeminiResponsesStream(streamUpstream.response, res, originalPayload.model || targetModel);
3724
+ publishOpenCodeUsageLog({
3725
+ requestId,
3726
+ channel,
3727
+ model: streamedResponseObject?.model || originalPayload.model || targetModel || '',
3728
+ usage: streamedResponseObject?.usage || {},
3729
+ startTime
3730
+ });
3731
+ recordSuccess(channel.id, 'opencode');
3732
+ } catch (error) {
3733
+ recordFailure(channel.id, 'opencode', error);
3734
+ if (!res.headersSent) {
3735
+ sendOpenAiStyleError(res, 502, `Gemini stream relay error: ${error.message}`, 'proxy_error');
3736
+ }
3737
+ }
3738
+ return true;
3739
+ }
3740
+
3741
+ let upstream;
3742
+ try {
3743
+ upstream = await postJson(targetUrl, headers, geminiPayload, 120000);
3744
+ } catch (error) {
3745
+ recordFailure(channel.id, 'opencode', error);
3746
+ sendOpenAiStyleError(res, 502, `Gemini gateway network error: ${error.message}`, 'proxy_error');
3747
+ return true;
3748
+ }
3749
+
3750
+ const statusCode = Number(upstream.statusCode) || 500;
3751
+ let parsedBody = null;
3752
+ try {
3753
+ parsedBody = upstream.rawBody ? JSON.parse(upstream.rawBody) : {};
3754
+ } catch {
3755
+ parsedBody = null;
3756
+ }
3757
+
3758
+ if (statusCode < 200 || statusCode >= 300) {
3759
+ const upstreamMessage = parsedBody?.error?.message || parsedBody?.message || upstream.rawBody || `HTTP ${statusCode}`;
3760
+ recordFailure(channel.id, 'opencode', new Error(String(upstreamMessage).slice(0, 200)));
3761
+ sendOpenAiStyleError(res, statusCode, String(upstreamMessage).slice(0, 1000), 'upstream_error');
3762
+ return true;
3763
+ }
3764
+
3765
+ if (!parsedBody || typeof parsedBody !== 'object') {
3766
+ recordFailure(channel.id, 'opencode', new Error('Invalid Gemini gateway response'));
3767
+ sendOpenAiStyleError(res, 502, 'Invalid Gemini gateway response', 'proxy_error');
3768
+ return true;
3769
+ }
3770
+
3771
+ if (isResponsesPath(pathname)) {
3772
+ const responseObject = buildOpenAiResponsesObjectFromGemini(parsedBody, targetModel);
3773
+ res.json(responseObject);
3774
+ publishOpenCodeUsageLog({
3775
+ requestId,
3776
+ channel,
3777
+ model: responseObject.model,
3778
+ usage: responseObject.usage,
3779
+ startTime
3780
+ });
3781
+ recordSuccess(channel.id, 'opencode');
3782
+ return true;
3783
+ }
3784
+
3785
+ const chatResponseObject = buildOpenAiChatCompletionsObjectFromGemini(parsedBody, targetModel);
3786
+ if (wantsStream) {
3787
+ sendChatCompletionsSse(res, chatResponseObject);
3788
+ } else {
3789
+ res.json(chatResponseObject);
3790
+ }
3791
+ publishOpenCodeUsageLog({
3792
+ requestId,
3793
+ channel,
3794
+ model: chatResponseObject.model,
3795
+ usage: chatResponseObject.usage,
3796
+ startTime
3797
+ });
3798
+ recordSuccess(channel.id, 'opencode');
3799
+ return true;
3800
+ }
3801
+
3802
// Aggregate a deduplicated model-id list across the given channels.
// Channels are queried strictly one at a time: concurrent probing has been
// observed to trigger upstream session-window rate limiting.
// Returns an array of model ids in first-seen order, deduplicated
// case-insensitively while preserving each id's original casing.
async function collectProxyModelList(channels = [], options = {}) {
  const seenKeys = new Set();
  const collected = [];

  // Record one model id; non-strings, blanks and case-insensitive
  // duplicates are silently ignored.
  const appendModel = (candidate) => {
    if (typeof candidate !== 'string') {
      return;
    }
    const name = candidate.trim();
    if (!name) {
      return;
    }
    const dedupeKey = name.toLowerCase();
    if (seenKeys.has(dedupeKey)) {
      return;
    }
    seenKeys.add(dedupeKey);
    collected.push(name);
  };

  const forceRefresh = options.forceRefresh === true;
  for (const channel of channels) {
    try {
      // First preference: the provider's own /v1/models listing.
      // eslint-disable-next-line no-await-in-loop
      const listResult = await fetchModelsFromProvider(channel, 'openai_compatible', { forceRefresh });
      const listedModels = Array.isArray(listResult?.models) ? listResult.models : [];
      if (listedModels.length > 0) {
        for (const modelId of listedModels) {
          appendModel(modelId);
        }
        continue;
      }

      // By default only converter-preset channels fall back to availability
      // probing; when /v1/models is disabled by config, every channel does.
      const probeEveryChannel = !!listResult?.disabledByConfig;
      if (!probeEveryChannel && !isConverterPresetChannel(channel)) {
        continue;
      }

      const channelType = normalizeGatewaySourceType(channel);
      // eslint-disable-next-line no-await-in-loop
      const probe = await probeModelAvailability(channel, channelType, {
        forceRefresh,
        stopOnFirstAvailable: false,
        preferredModels: collectPreferredProbeModels(channel)
      });
      const availableModels = Array.isArray(probe?.availableModels) ? probe.availableModels : [];
      for (const modelId of availableModels) {
        appendModel(modelId);
      }
    } catch (err) {
      // Best-effort aggregation: one failing channel must not abort the list.
      console.warn(`[OpenCode Proxy] Build model list failed for ${channel?.name || channel?.id || 'unknown'}:`, err.message);
    }
  }

  return collected;
}
3851
+
3852
// Start the OpenCode proxy server.
// Resolves with { success: true, port } once the listener is bound on
// 127.0.0.1; rejects if the server fails to start (e.g. port already in use).
async function startOpenCodeProxyServer(options = {}) {
  // Back-compat with the legacy call shape: startOpenCodeProxyServer(portNumber)
  if (typeof options === 'number') {
    options = { port: options };
  }

  // options.preserveStartTime — keep the previously recorded start time
  // (used when switching channels rather than doing a fresh start)
  const preserveStartTime = options.preserveStartTime || false;

  // Idempotent: a second start request while already running is a no-op.
  if (proxyServer) {
    console.log('OpenCode proxy server already running on port', currentPort);
    return { success: true, port: currentPort };
  }

  try {
    const config = loadConfig();
    const configuredPort = config.ports?.opencodeProxy || 20091;
    const port = options.port !== undefined ? Number(options.port) : configuredPort;

    // Port 0 passes this check (OS-assigned port); negative or non-numeric
    // values do not.
    if (!Number.isFinite(port) || port < 0) {
      throw new Error(`Invalid proxy port: ${options.port}`);
    }

    currentPort = port;

    proxyApp = express();

    // Only JSON-parse bodies for requests that need it; everything else is
    // passed through untouched so streaming uploads are not buffered.
    proxyApp.use((req, res, next) => {
      if (shouldParseJson(req)) {
        return jsonBodyParser(req, res, next);
      }
      return next();
    });

    const proxy = httpProxy.createProxyServer({});

    // Outgoing-request hook: rewrite the auth header to the channel's
    // effective key and re-serialize any body that middleware parsed/modified.
    proxy.on('proxyReq', (proxyReq, req) => {
      const activeChannel = req.selectedChannel;
      if (!activeChannel) return;

      const requestId = `opencode-${Date.now()}-${Math.random()}`;
      // Per-request metadata lets the proxyRes handler attribute
      // tokens/cost back to the channel that served the request.
      requestMetadata.set(req, {
        id: requestId,
        channel: activeChannel.name,
        channelId: activeChannel.id,
        startTime: Date.now()
      });

      proxyReq.removeHeader('authorization');
      // Use pre-fetched effective key from async middleware
      const effectiveKey = req.effectiveApiKey;
      proxyReq.setHeader('authorization', `Bearer ${effectiveKey}`);
      proxyReq.setHeader('openai-beta', 'responses=experimental');
      if (!proxyReq.getHeader('content-type')) {
        proxyReq.setHeader('content-type', 'application/json');
      }

      // The body may have been rewritten (model redirect), so it must be
      // re-sent with a recomputed Content-Length; rawBody takes precedence.
      if (shouldParseJson(req) && (req.rawBody || req.body)) {
        const bodyBuffer = req.rawBody
          ? Buffer.isBuffer(req.rawBody) ? req.rawBody : Buffer.from(req.rawBody)
          : Buffer.from(JSON.stringify(req.body));
        proxyReq.setHeader('Content-Length', bodyBuffer.length);
        proxyReq.write(bodyBuffer);
        proxyReq.end();
      }
    });

    // OpenCode calls /v1/models (or /models) first to fetch the model list,
    // but many third-party OpenAI-compatible endpoints do not implement that
    // route (e.g. they return 404). To keep OpenCode usable, serve the locally
    // aggregated model list instead of forwarding the request upstream.
    proxyApp.get(['/v1/models', '/models'], async (req, res) => {
      try {
        const channels = getEnabledChannels();
        const models = await collectProxyModelList(channels, { forceRefresh: false });
        res.json({
          object: 'list',
          data: models.map(id => ({ id, object: 'model' }))
        });
      } catch (err) {
        console.error('[OpenCode Proxy] Failed to build models list:', err);
        res.status(500).json({
          error: {
            message: err.message || 'Failed to list models',
            type: 'internal_error'
          }
        });
      }
    });

    // Catch-all handler: allocate a channel, apply model redirects, dispatch
    // to a gateway converter when the channel type requires one, otherwise
    // forward the request through the raw HTTP proxy.
    proxyApp.use(async (req, res) => {
      try {
        const channel = await allocateChannel({ source: 'opencode', enableSessionBinding: false });
        req.selectedChannel = channel;

        // Verify the channel has a usable API key before doing any work.
        const effectiveKey = await getEffectiveApiKey(channel);
        if (!effectiveKey) {
          releaseChannel(channel.id, 'opencode');
          broadcastSchedulerState('opencode', getSchedulerState('opencode'));
          return res.status(401).json({
            error: {
              message: 'API key not configured or expired. Please update your channel key.',
              type: 'authentication_error'
            }
          });
        }

        // Store the effective key on the request for use in proxyReq handler
        req.effectiveApiKey = effectiveKey;

        // Apply model redirection (when enabled for this channel's proxy).
        if (req.body && typeof req.body === 'object' && !Array.isArray(req.body) && req.body.model) {
          const originalModel = req.body.model;
          const redirectedModel = redirectModel(originalModel, channel);

          if (redirectedModel !== originalModel) {
            req.body.model = redirectedModel;
            // Keep rawBody in sync with the rewritten body so the proxyReq
            // hook forwards the redirected model.
            req.rawBody = Buffer.from(JSON.stringify(req.body));

            // Only log when the redirect mapping changes, to avoid printing
            // the same line on every request.
            const cachedRedirects = printedRedirectCache.get(channel.id) || {};
            if (cachedRedirects[originalModel] !== redirectedModel) {
              cachedRedirects[originalModel] = redirectedModel;
              printedRedirectCache.set(channel.id, cachedRedirects);
              console.log(`[OpenCode Model Redirect] ${originalModel} → ${redirectedModel} (channel: ${channel.name})`);
            }
          }
        }

        // Idempotent release closure: the channel is returned to the pool at
        // most once, whichever of close/error/proxy-callback fires first.
        const release = (() => {
          let released = false;
          return () => {
            if (released) return;
            released = true;
            releaseChannel(channel.id, 'opencode');
            broadcastSchedulerState('opencode', getSchedulerState('opencode'));
          };
        })();

        res.on('close', release);
        res.on('error', release);

        broadcastSchedulerState('opencode', getSchedulerState('opencode'));

        // Channels backed by a non-OpenAI gateway get format conversion; a
        // handler returning truthy means it fully answered the request.
        const gatewaySourceType = normalizeGatewaySourceType(channel);
        if (gatewaySourceType === 'codex') {
          const handled = await handleCodexGatewayRequest(req, res, channel, effectiveKey);
          if (handled) {
            return;
          }
        }
        if (gatewaySourceType === 'claude') {
          const handled = await handleClaudeGatewayRequest(req, res, channel, effectiveKey);
          if (handled) {
            return;
          }
        }
        if (gatewaySourceType === 'gemini') {
          const handled = await handleGeminiGatewayRequest(req, res, channel, effectiveKey);
          if (handled) {
            return;
          }
        }

        // Plain OpenAI-compatible channel: stream through the raw proxy.
        const target = resolveOpenCodeTarget(channel.baseUrl, req.url);

        proxy.web(req, res, {
          target,
          changeOrigin: true,
          proxyTimeout: 120000, // upstream connection timeout: 2 minutes
          timeout: 120000 // request timeout: 2 minutes
        }, (err) => {
          release();
          if (err) {
            recordFailure(channel.id, 'opencode', err);
            console.error('OpenCode proxy error:', err);
            if (res && !res.headersSent) {
              res.status(502).json({
                error: {
                  message: 'Proxy error: ' + err.message,
                  type: 'proxy_error'
                }
              });
            }
          }
        });
      } catch (error) {
        // allocateChannel (or other setup) failed — report pool exhaustion.
        console.error('OpenCode channel allocation error:', error);
        if (!res.headersSent) {
          res.status(503).json({
            error: {
              message: error.message || 'No OpenCode channel available',
              type: 'channel_pool_exhausted'
            }
          });
        }
      }
    });

    // Observe upstream responses (OpenAI format) to extract token usage for
    // logging/statistics. The response body itself is still streamed to the
    // client by http-proxy; this handler only taps the data events.
    proxy.on('proxyRes', (proxyRes, req, res) => {
      const metadata = requestMetadata.get(req);
      if (!metadata) {
        return;
      }

      // Bail out if the client response has already finished or been torn down.
      if (res.writableEnded || res.destroyed) {
        requestMetadata.delete(req);
        return;
      }

      // Tracks whether the client-side response has closed mid-stream.
      let isResponseClosed = false;

      res.on('close', () => {
        isResponseClosed = true;
        requestMetadata.delete(req);
      });

      res.on('error', (err) => {
        isResponseClosed = true;
        // Ignore the common client-disconnect errors.
        if (err.code !== 'EPIPE' && err.code !== 'ECONNRESET') {
          console.error('Response error:', err);
        }
        requestMetadata.delete(req);
      });

      let buffer = '';
      let tokenData = {
        inputTokens: 0,
        outputTokens: 0,
        cachedTokens: 0,
        reasoningTokens: 0,
        totalTokens: 0,
        model: ''
      };

      proxyRes.on('data', (chunk) => {
        // Stop accumulating once the client response has closed.
        if (isResponseClosed) {
          return;
        }

        buffer += chunk.toString();

        // SSE streams are parsed event-by-event as they arrive.
        if (proxyRes.headers['content-type']?.includes('text/event-stream')) {
          // Events are separated by blank lines; keep any trailing partial
          // event in the buffer for the next chunk.
          const events = buffer.split('\n\n');
          buffer = events.pop() || '';

          events.forEach((eventText, index) => {
            if (!eventText.trim()) return;

            try {
              const lines = eventText.split('\n');
              let data = '';

              lines.forEach(line => {
                if (line.startsWith('data:')) {
                  data = line.substring(5).trim();
                }
              });

              if (!data) return;

              if (data === '[DONE]') return;

              const parsed = JSON.parse(data);

              // OpenAI Responses API: usage arrives on the
              // response.completed event.
              if (parsed.type === 'response.completed' && parsed.response) {
                // Extract model and usage from the completed response object.
                if (parsed.response.model) {
                  tokenData.model = parsed.response.model;
                }

                if (parsed.response.usage) {
                  tokenData.inputTokens = parsed.response.usage.input_tokens || 0;
                  tokenData.outputTokens = parsed.response.usage.output_tokens || 0;
                  tokenData.totalTokens = parsed.response.usage.total_tokens || 0;

                  // Detailed breakdowns (cached / reasoning tokens).
                  if (parsed.response.usage.input_tokens_details) {
                    tokenData.cachedTokens = parsed.response.usage.input_tokens_details.cached_tokens || 0;
                  }
                  if (parsed.response.usage.output_tokens_details) {
                    tokenData.reasoningTokens = parsed.response.usage.output_tokens_details.reasoning_tokens || 0;
                  }
                }
              }

              // Other formats: model and usage directly at the top level.
              if (parsed.model && !tokenData.model) {
                tokenData.model = parsed.model;
              }

              if (parsed.usage && tokenData.inputTokens === 0) {
                // Accept both Responses API and Chat Completions field names.
                tokenData.inputTokens = parsed.usage.input_tokens || parsed.usage.prompt_tokens || 0;
                tokenData.outputTokens = parsed.usage.output_tokens || parsed.usage.completion_tokens || 0;
              }
            } catch (err) {
              // Ignore malformed SSE payloads; usage extraction is best-effort.
            }
          });
        }
      });

      proxyRes.on('end', () => {
        // Non-streaming responses: parse the fully buffered JSON body.
        if (!proxyRes.headers['content-type']?.includes('text/event-stream')) {
          try {
            const parsed = JSON.parse(buffer);
            if (parsed.model) {
              tokenData.model = parsed.model;
            }
            if (parsed.usage) {
              // Accept both Responses API and Chat Completions field names.
              tokenData.inputTokens = parsed.usage.input_tokens || parsed.usage.prompt_tokens || 0;
              tokenData.outputTokens = parsed.usage.output_tokens || parsed.usage.completion_tokens || 0;
            }
          } catch (err) {
            // Ignore parse errors; usage extraction is best-effort.
          }
        }

        // Only record a log entry when some token data was observed.
        if (tokenData.inputTokens > 0 || tokenData.outputTokens > 0) {
          const now = new Date();
          const time = now.toLocaleTimeString('zh-CN', {
            hour12: false,
            hour: '2-digit',
            minute: '2-digit',
            second: '2-digit'
          });

          // Compute the stats first so both broadcast and record paths agree.
          const tokens = {
            input: tokenData.inputTokens,
            output: tokenData.outputTokens,
            total: tokenData.inputTokens + tokenData.outputTokens
          };
          const cost = calculateCost(tokenData.model, tokens);

          // Broadcast the usage log (only while the response is still open).
          if (!isResponseClosed) {
            broadcastLog({
              type: 'log',
              id: metadata.id,
              time: time,
              channel: metadata.channel,
              model: tokenData.model,
              inputTokens: tokenData.inputTokens,
              outputTokens: tokenData.outputTokens,
              cachedTokens: tokenData.cachedTokens,
              reasoningTokens: tokenData.reasoningTokens,
              totalTokens: tokenData.totalTokens,
              cost: cost,
              source: 'opencode'
            });
          }

          const duration = Date.now() - metadata.startTime;

          recordOpenCodeRequest({
            id: metadata.id,
            timestamp: new Date(metadata.startTime).toISOString(),
            toolType: 'opencode',
            channel: metadata.channel,
            channelId: metadata.channelId,
            model: tokenData.model,
            tokens: {
              input: tokenData.inputTokens,
              output: tokenData.outputTokens,
              reasoning: tokenData.reasoningTokens,
              cached: tokenData.cachedTokens,
              total: tokens.total
            },
            duration: duration,
            success: true,
            cost: cost
          });

          recordSuccess(metadata.channelId, 'opencode');
        }

        if (!isResponseClosed) {
          requestMetadata.delete(req);
        }
      });

      proxyRes.on('error', (err) => {
        // Ignore common transient network errors on the upstream response.
        if (err.code !== 'EPIPE' && err.code !== 'ECONNRESET') {
          console.error('Proxy response error:', err);
        }
        isResponseClosed = true;
        recordFailure(metadata.channelId, 'opencode', err);
        requestMetadata.delete(req);
      });
    });

    // Proxy-level errors: record the failure, free the channel and answer 502.
    proxy.on('error', (err, req, res) => {
      console.error('OpenCode proxy error:', err);
      if (req && req.selectedChannel) {
        recordFailure(req.selectedChannel.id, 'opencode', err);
        releaseChannel(req.selectedChannel.id, 'opencode');
        broadcastSchedulerState('opencode', getSchedulerState('opencode'));
      }
      if (res && !res.headersSent) {
        res.status(502).json({
          error: {
            message: 'Proxy error: ' + err.message,
            type: 'proxy_error'
          }
        });
      }
    });

    // Start listening (loopback only).
    proxyServer = http.createServer(proxyApp);

    return new Promise((resolve, reject) => {
      proxyServer.listen(port, '127.0.0.1', () => {
        // With port 0 the OS assigns a port; read back the actual one.
        const actualPort = proxyServer.address()?.port || port;
        currentPort = actualPort;
        console.log(`OpenCode proxy server started on http://127.0.0.1:${actualPort}`);

        // Persist the proxy start time (preserved when switching channels).
        saveProxyStartTime('opencode', preserveStartTime);

        resolve({ success: true, port: actualPort });
      });

      // NOTE(review): this handler also fires for server errors after a
      // successful listen; the reject is then a no-op on the settled promise,
      // but the module state is still nulled out — confirm that is intended.
      proxyServer.on('error', (err) => {
        if (err.code === 'EADDRINUSE') {
          console.error(chalk.red(`\nOpenCode proxy port ${port} is already in use`));
        } else {
          console.error('Failed to start OpenCode proxy server:', err);
        }
        proxyServer = null;
        proxyApp = null;
        currentPort = null;
        reject(err);
      });
    });
  } catch (err) {
    console.error('Error starting OpenCode proxy server:', err);
    throw err;
  }
}
4311
+
4312
// Stop the OpenCode proxy server.
// options.clearStartTime — whether to clear the persisted start time
// (defaults to true; pass false to keep it, e.g. when restarting).
// Resolves with { success, port } after the listener has closed, or with a
// { success, message } no-op result when the server was not running.
async function stopOpenCodeProxyServer(options = {}) {
  const shouldClearStartTime = options.clearStartTime !== false;

  if (!proxyServer) {
    return { success: true, message: 'OpenCode proxy server not running' };
  }

  // Drop all per-request bookkeeping before shutting down the listener.
  requestMetadata.clear();

  return new Promise((resolve) => {
    const onClosed = () => {
      console.log('OpenCode proxy server stopped');

      // Clear the persisted start time only when the caller asked for it.
      if (shouldClearStartTime) {
        clearProxyStartTime('opencode');
      }

      const stoppedPort = currentPort;
      proxyServer = null;
      proxyApp = null;
      currentPort = null;
      resolve({ success: true, port: stoppedPort });
    };
    proxyServer.close(onClosed);
  });
}
4340
+
4341
// Report the current OpenCode proxy server status:
// running flag, bound port, configured default port, persisted start time
// and runtime info.
function getOpenCodeProxyStatus() {
  const config = loadConfig();
  const fallbackPort = config.ports?.opencodeProxy || 20091;

  return {
    running: Boolean(proxyServer),
    port: currentPort,
    defaultPort: fallbackPort,
    startTime: getProxyStartTime('opencode'),
    runtime: getProxyRuntime('opencode')
  };
}
4355
+
4356
/**
 * Clear the cached model-redirect log entries for a channel.
 * Used after a channel's configuration changes so the redirect mapping is
 * logged again on the next request.
 * @param {string} channelId - channel id; when falsy, every channel's cache is cleared
 */
function clearOpenCodeRedirectCache(channelId) {
  if (!channelId) {
    printedRedirectCache.clear();
    return;
  }
  printedRedirectCache.delete(channelId);
}
4368
+
4369
// Public API: proxy lifecycle controls, redirect-log cache invalidation, and
// the channel model-list aggregator (also used by the /v1/models route above).
module.exports = {
  startOpenCodeProxyServer,
  stopOpenCodeProxyServer,
  getOpenCodeProxyStatus,
  clearOpenCodeRedirectCache,
  collectProxyModelList
};