protocol-proxy 2.2.0 → 2.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/converters/anthropic-to-gemini.js +253 -0
- package/lib/converters/gemini-to-anthropic.js +275 -0
- package/lib/converters/gemini-to-openai.js +238 -0
- package/lib/converters/openai-to-gemini.js +284 -0
- package/lib/detector.js +4 -0
- package/lib/proxy-server.js +44 -5
- package/lib/stats-store.js +11 -13
- package/package.json +2 -3
- package/public/app.js +24 -4
- package/public/index.html +9 -4
- package/public/style.css +44 -2
- package/server.js +4 -0
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Gemini → OpenAI 协议转换
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
const { encodeOpenAIEvent, encodeOpenAIDone } = require('./sse-helpers');
|
|
6
|
+
|
|
7
|
+
// Build a pseudo-random OpenAI-style tool-call id: "call_" followed by up to
// 12 base-36 characters. Not cryptographically secure; used only so the
// client can correlate tool calls with tool results.
function generateCallId() {
  const suffix = Math.random().toString(36).slice(2, 14);
  return `call_${suffix}`;
}
|
|
10
|
+
|
|
11
|
+
// ==================== 请求转换 ====================
|
|
12
|
+
|
|
13
|
+
/**
 * Convert a Gemini generateContent request body into an OpenAI
 * chat-completions request body.
 *
 * @param {object} body - Gemini request ({ contents, systemInstruction?, tools?, generationConfig? }).
 * @param {string} targetModel - Model name to place in the OpenAI request.
 * @returns {object} OpenAI chat-completions request body. `stream` defaults to
 *   false; the caller toggles streaming separately.
 */
function convertRequest(body, targetModel) {
  const messages = [];

  // system instruction → system message.
  // Fix: the Gemini REST API accepts both camelCase (`systemInstruction`) and
  // snake_case (`system_instruction`); previously snake_case was dropped.
  const sysInstr = body.systemInstruction || body.system_instruction;
  const sysText = sysInstr?.parts?.map(p => p.text || '').join('') || '';
  if (sysText) {
    messages.push({ role: 'system', content: sysText });
  }

  // tools: functionDeclarations → OpenAI tools
  let tools = undefined;
  if (Array.isArray(body.tools)) {
    const allDeclarations = [];
    for (const tool of body.tools) {
      if (tool.functionDeclarations) {
        allDeclarations.push(...tool.functionDeclarations);
      }
    }
    if (allDeclarations.length > 0) {
      tools = allDeclarations.map(fd => ({
        type: 'function',
        function: {
          name: fd.name,
          description: fd.description || '',
          parameters: fd.parameters || { type: 'object', properties: {} },
        },
      }));
    }
  }

  // contents → messages; functionCall/functionResponse → tool_calls / tool results
  for (const msg of (body.contents || [])) {
    const role = msg.role === 'model' ? 'assistant' : 'user';
    const parts = msg.parts || [];

    // Model turn carrying functionCall parts → assistant message with tool_calls.
    const functionCalls = parts.filter(p => p.functionCall);
    if (functionCalls.length > 0) {
      const text = parts.filter(p => p.text).map(p => p.text).join('');
      const tool_calls = functionCalls.map(fc => ({
        id: generateCallId(),
        type: 'function',
        function: { name: fc.functionCall.name, arguments: JSON.stringify(fc.functionCall.args || {}) },
      }));
      messages.push({ role: 'assistant', content: text || null, tool_calls });
      continue;
    }

    // functionResponse parts → one OpenAI "tool" message each.
    // NOTE: Gemini identifies tool results by function *name* while OpenAI uses
    // a call id; the name is reused as tool_call_id, which assumes names are
    // unique within a turn.
    const functionResponses = parts.filter(p => p.functionResponse);
    for (const fr of functionResponses) {
      messages.push({
        role: 'tool',
        tool_call_id: fr.functionResponse.name || 'unknown',
        content: typeof fr.functionResponse.response === 'string'
          ? fr.functionResponse.response
          : JSON.stringify(fr.functionResponse.response || {}),
      });
    }

    // Plain-text parts (skipped when the turn already produced tool messages).
    // The functionCalls branch above `continue`s, so only functionResponses
    // need re-checking here.
    const textParts = parts.filter(p => p.text).map(p => p.text).join('');
    if (textParts && functionResponses.length === 0) {
      messages.push({ role, content: textParts });
    }
  }

  const result = {
    model: targetModel,
    messages,
    stream: false,
  };

  if (tools) result.tools = tools;

  // generationConfig → OpenAI sampling parameters
  const gc = body.generationConfig || {};
  if (gc.maxOutputTokens !== undefined) result.max_tokens = gc.maxOutputTokens;
  if (gc.temperature !== undefined) result.temperature = gc.temperature;
  if (gc.topP !== undefined) result.top_p = gc.topP;
  if (gc.stopSequences) result.stop = gc.stopSequences;

  return result;
}
|
|
97
|
+
|
|
98
|
+
// ==================== 响应转换 ====================
|
|
99
|
+
|
|
100
|
+
/**
 * Convert a non-streaming Gemini generateContent response into an OpenAI
 * chat-completion response. Only the first candidate is used; tool calls
 * force finish_reason 'tool_calls'.
 */
function convertResponse(geminiBody) {
  const candidate = geminiBody.candidates?.[0];
  const usage = convertUsage(geminiBody.usageMetadata);
  if (!candidate) {
    return { id: '', object: 'chat.completion', choices: [], usage };
  }

  let content = '';
  const tool_calls = [];
  for (const part of (candidate.content?.parts || [])) {
    if (part.text) content += part.text;
    if (part.functionCall) {
      tool_calls.push({
        id: generateCallId(),
        type: 'function',
        function: { name: part.functionCall.name, arguments: JSON.stringify(part.functionCall.args || {}) },
      });
    }
  }

  const message = { role: 'assistant', content: content || null };
  if (tool_calls.length > 0) message.tool_calls = tool_calls;

  const finish_reason = tool_calls.length > 0
    ? 'tool_calls'
    : mapFinishReason(candidate.finishReason);

  return {
    id: '',
    object: 'chat.completion',
    choices: [{ index: 0, message, finish_reason }],
    usage,
  };
}
|
|
135
|
+
|
|
136
|
+
/**
 * Map Gemini usageMetadata to OpenAI usage counters.
 * Missing metadata or missing fields default to 0.
 */
function convertUsage(meta) {
  const { promptTokenCount, candidatesTokenCount, totalTokenCount } = meta || {};
  return {
    prompt_tokens: promptTokenCount || 0,
    completion_tokens: candidatesTokenCount || 0,
    total_tokens: totalTokenCount || 0,
  };
}
|
|
143
|
+
|
|
144
|
+
/**
 * Map a Gemini finishReason to the OpenAI equivalent.
 * Falsy input → null; unknown non-empty reasons fall back to 'stop'.
 */
function mapFinishReason(reason) {
  if (!reason) return null;
  switch (reason) {
    case 'MAX_TOKENS':
      return 'length';
    case 'SAFETY':
      return 'content_filter';
    case 'STOP':
    default:
      return 'stop';
  }
}
|
|
151
|
+
|
|
152
|
+
// ==================== SSE 流式转换 ====================
|
|
153
|
+
|
|
154
|
+
/**
 * Create a stateful converter that translates a Gemini SSE stream into
 * OpenAI chat.completion.chunk SSE events.
 *
 * Fix: network chunks can end mid-line, and the original parsed each chunk
 * independently — a JSON event split across two chunks failed JSON.parse and
 * was silently dropped. We now buffer the trailing partial line and re-join
 * it with the next chunk; flush() drains a final event that arrived without
 * a trailing newline.
 */
function createSSEConverter() {
  const state = { started: false, sentFunctionCall: new Map(), buffer: '' };

  // Translate one complete SSE line into zero or more OpenAI chunk events.
  function processLine(line) {
    let output = '';
    const trimmed = line.trim();
    if (!trimmed.startsWith('data: ')) return output;
    const dataStr = trimmed.slice(6);
    if (!dataStr) return output;

    let chunk;
    try { chunk = JSON.parse(dataStr); } catch { return output; }

    const candidate = chunk.candidates?.[0];
    if (!candidate) return output;

    const parts = candidate.content?.parts || [];

    // First content-bearing chunk: emit the assistant role delta once.
    if (!state.started && parts.length > 0) {
      state.started = true;
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: { role: 'assistant', content: null }, finish_reason: null }],
      });
    }

    // Text delta
    const text = parts.filter(p => p.text).map(p => p.text).join('') || '';
    if (text) {
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: { content: text }, finish_reason: null }],
      });
    }

    // functionCall deltas (deduplicated by name+args; call id cached on first emit)
    for (const part of parts) {
      if (!part.functionCall) continue;
      const key = part.functionCall.name + (typeof part.functionCall.args === 'string' ? part.functionCall.args : JSON.stringify(part.functionCall.args || {}));
      if (state.sentFunctionCall.has(key)) continue;
      const callId = generateCallId();
      state.sentFunctionCall.set(key, callId);
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{
          index: 0,
          delta: {
            tool_calls: [{
              index: 0,
              id: callId,
              type: 'function',
              function: { name: part.functionCall.name, arguments: JSON.stringify(part.functionCall.args || {}) },
            }],
          },
          finish_reason: null,
        }],
      });
    }

    // Terminal chunk: finish_reason then [DONE]
    if (candidate.finishReason) {
      const reason = mapFinishReason(candidate.finishReason);
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: {}, finish_reason: reason }],
      });
      output += encodeOpenAIDone();
    }

    return output;
  }

  return {
    convertChunk(chunkText) {
      // Buffer across chunk boundaries: only complete lines are processed.
      state.buffer += chunkText;
      const lines = state.buffer.split('\n');
      // Last element is '' when the chunk ended on '\n', else a partial line.
      state.buffer = lines.pop();
      let output = '';
      for (const line of lines) output += processLine(line);
      return output || null;
    },
    flush() {
      // Drain a final event that arrived without a trailing newline.
      const rest = state.buffer;
      state.buffer = '';
      return rest ? processLine(rest) : '';
    },
  };
}
|
|
237
|
+
|
|
238
|
+
module.exports = { convertRequest, convertResponse, createSSEConverter };
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI → Gemini 协议转换
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
const { encodeOpenAIEvent, encodeOpenAIDone } = require('./sse-helpers');
|
|
6
|
+
|
|
7
|
+
// ==================== 请求转换 ====================
|
|
8
|
+
|
|
9
|
+
// Produce a unique-enough OpenAI-style tool-call identifier. Format is
// "call_" plus up to 12 lowercase base-36 characters; uniqueness is
// best-effort (Math.random), sufficient for per-conversation correlation.
function generateCallId() {
  const rand = Math.random().toString(36);
  return 'call_' + rand.slice(2, 14);
}
|
|
12
|
+
|
|
13
|
+
/**
 * Convert an OpenAI chat-completions request body into a Gemini
 * generateContent request body.
 *
 * Mapping:
 *   system messages        → systemInstruction (multiple merged with '\n')
 *   role "tool"            → user turn with a functionResponse part
 *   assistant tool_calls   → model turn with functionCall parts
 *   array content blocks   → text / functionCall / functionResponse parts
 *   tools                  → tools[0].functionDeclarations
 *   max_tokens / temperature / top_p / stop → generationConfig
 *
 * @param {object} body - OpenAI chat-completions request body.
 * @param {string} targetModel - Target model name (unused here: the Gemini
 *   API carries the model in the URL, not the body).
 * @returns {object} Gemini generateContent request body.
 */
function convertRequest(body, targetModel) {
  const contents = [];
  let systemInstruction = null;

  // Extract plain text from either a string or an array of content blocks.
  const textOf = (content) => {
    if (typeof content === 'string') return content;
    if (Array.isArray(content)) {
      return content.filter(b => b.type === 'text' && b.text).map(b => b.text).join('');
    }
    return '';
  };

  for (const msg of (body.messages || [])) {
    if (msg.role === 'system') {
      // Merge multiple system messages into a single instruction.
      // Fix: array-style system content (OpenAI content blocks) previously
      // collapsed to '' and the system prompt was lost.
      const text = textOf(msg.content);
      if (!systemInstruction) {
        systemInstruction = { parts: [{ text }] };
      } else {
        systemInstruction.parts[0].text += '\n' + text;
      }
      continue;
    }

    // tool result → functionResponse. Gemini matches responses by function
    // *name*; the OpenAI tool_call_id is reused as that name.
    if (msg.role === 'tool') {
      contents.push({
        role: 'user',
        parts: [{
          functionResponse: {
            name: msg.tool_call_id || 'unknown',
            response: typeof msg.content === 'string' ? { result: msg.content } : msg.content || {},
          },
        }],
      });
      continue;
    }

    // assistant with tool_calls → model turn with functionCall parts
    if (msg.role === 'assistant' && msg.tool_calls && msg.tool_calls.length > 0) {
      const parts = [];
      if (msg.content) parts.push({ text: typeof msg.content === 'string' ? msg.content : '' });
      for (const tc of msg.tool_calls) {
        let args = {};
        try {
          args = tc.function?.arguments ? JSON.parse(tc.function.arguments) : {};
        } catch { args = {}; } // malformed JSON arguments → empty object
        parts.push({ functionCall: { name: tc.function?.name || '', args } });
      }
      contents.push({ role: 'model', parts });
      continue;
    }

    const role = msg.role === 'assistant' ? 'model' : 'user';

    // Array content (e.g. Anthropic-originated blocks) → mixed Gemini parts
    if (Array.isArray(msg.content)) {
      const parts = [];
      for (const block of msg.content) {
        if (block.type === 'text' && block.text) parts.push({ text: block.text });
        if (block.type === 'tool_use') {
          parts.push({ functionCall: { name: block.name, args: block.input || {} } });
        }
        if (block.type === 'tool_result') {
          parts.push({
            functionResponse: {
              name: block.tool_use_id || 'unknown',
              response: typeof block.content === 'string' ? { result: block.content } : block.content || {},
            },
          });
        }
      }
      if (parts.length > 0) contents.push({ role, parts });
      continue;
    }

    const text = typeof msg.content === 'string' ? msg.content : '';
    if (text) contents.push({ role, parts: [{ text }] });
  }

  const result = { contents };
  if (systemInstruction) result.systemInstruction = systemInstruction;

  // tools → functionDeclarations
  if (Array.isArray(body.tools)) {
    const functionDeclarations = [];
    for (const tool of body.tools) {
      if (tool.type === 'function' && tool.function) {
        functionDeclarations.push({
          name: tool.function.name,
          description: tool.function.description || '',
          parameters: tool.function.parameters || { type: 'object', properties: {} },
        });
      }
    }
    if (functionDeclarations.length > 0) {
      result.tools = [{ functionDeclarations }];
    }
  }

  // sampling parameters → generationConfig
  const gc = {};
  if (body.max_tokens !== undefined) gc.maxOutputTokens = body.max_tokens;
  if (body.temperature !== undefined) gc.temperature = body.temperature;
  if (body.top_p !== undefined) gc.topP = body.top_p;
  if (body.stop) gc.stopSequences = Array.isArray(body.stop) ? body.stop : [body.stop];
  if (Object.keys(gc).length > 0) result.generationConfig = gc;

  return result;
}
|
|
127
|
+
|
|
128
|
+
// ==================== 响应转换 ====================
|
|
129
|
+
|
|
130
|
+
/**
 * Convert a non-streaming Gemini generateContent response into an OpenAI
 * chat-completion response. Only the first candidate is mapped; token
 * counts come from usageMetadata (missing fields default to 0).
 */
function convertResponse(geminiBody) {
  const candidate = geminiBody.candidates?.[0];
  if (!candidate) {
    return { id: '', object: 'chat.completion', choices: [], usage: {} };
  }

  let content = '';
  const tool_calls = [];
  for (const part of (candidate.content?.parts || [])) {
    if (part.text) content += part.text;
    if (part.functionCall) {
      tool_calls.push({
        id: generateCallId(),
        type: 'function',
        function: {
          name: part.functionCall.name,
          arguments: JSON.stringify(part.functionCall.args || {}),
        },
      });
    }
  }

  const message = { role: 'assistant', content: content || null };
  if (tool_calls.length > 0) message.tool_calls = tool_calls;

  const meta = geminiBody.usageMetadata;
  return {
    id: '',
    object: 'chat.completion',
    choices: [{
      index: 0,
      message,
      finish_reason: tool_calls.length > 0 ? 'tool_calls' : mapFinishReason(candidate.finishReason),
    }],
    usage: {
      prompt_tokens: meta?.promptTokenCount || 0,
      completion_tokens: meta?.candidatesTokenCount || 0,
      total_tokens: meta?.totalTokenCount || 0,
    },
  };
}
|
|
174
|
+
|
|
175
|
+
/**
 * Translate a Gemini finishReason into the OpenAI finish_reason vocabulary.
 * Falsy input → null; any unrecognized non-empty value maps to 'stop'.
 */
function mapFinishReason(reason) {
  if (!reason) return null;
  const table = { STOP: 'stop', MAX_TOKENS: 'length', SAFETY: 'content_filter' };
  return Object.hasOwn(table, reason) ? table[reason] : 'stop';
}
|
|
182
|
+
|
|
183
|
+
// ==================== SSE 流式转换 ====================
|
|
184
|
+
|
|
185
|
+
/**
 * Create a stateful converter turning a Gemini SSE stream into OpenAI
 * chat.completion.chunk SSE events.
 *
 * Fix: the original split each network chunk by '\n' independently, so an
 * SSE event whose JSON was split across two chunks failed JSON.parse and
 * was silently dropped. A line buffer now carries the trailing partial line
 * into the next chunk, and flush() drains a final event that arrived
 * without a trailing newline.
 */
function createSSEConverter() {
  const state = { started: false, sentFunctionCall: new Map(), buffer: '' };

  // Convert one complete SSE "data:" line into zero or more OpenAI events.
  function processLine(line) {
    let output = '';
    const trimmed = line.trim();
    if (!trimmed.startsWith('data: ')) return output;
    const dataStr = trimmed.slice(6);
    if (!dataStr) return output;

    let chunk;
    try { chunk = JSON.parse(dataStr); } catch { return output; }

    const candidate = chunk.candidates?.[0];
    if (!candidate) return output;

    const parts = candidate.content?.parts || [];

    // First content-bearing chunk: emit the assistant role delta once.
    if (!state.started && parts.length > 0) {
      state.started = true;
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{
          index: 0,
          delta: { role: 'assistant', content: null },
          finish_reason: null,
        }],
      });
    }

    // Text delta
    const text = parts.filter(p => p.text).map(p => p.text).join('') || '';
    if (text) {
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{
          index: 0,
          delta: { content: text },
          finish_reason: null,
        }],
      });
    }

    // functionCall deltas (deduplicated by name+args; id cached on first emit)
    for (const part of parts) {
      if (!part.functionCall) continue;
      const key = part.functionCall.name + (typeof part.functionCall.args === 'string' ? part.functionCall.args : JSON.stringify(part.functionCall.args || {}));
      if (state.sentFunctionCall.has(key)) continue;
      const callId = generateCallId();
      state.sentFunctionCall.set(key, callId);

      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{
          index: 0,
          delta: {
            tool_calls: [{
              index: 0,
              id: callId,
              type: 'function',
              function: {
                name: part.functionCall.name,
                arguments: JSON.stringify(part.functionCall.args || {}),
              },
            }],
          },
          finish_reason: null,
        }],
      });
    }

    // finish → terminal finish_reason event followed by [DONE]
    if (candidate.finishReason) {
      const reason = mapFinishReason(candidate.finishReason);
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: {}, finish_reason: reason }],
      });
      output += encodeOpenAIDone();
    }

    return output;
  }

  return {
    convertChunk(chunkText) {
      // Accumulate and process only complete lines; keep the partial tail.
      state.buffer += chunkText;
      const lines = state.buffer.split('\n');
      state.buffer = lines.pop(); // '' when the chunk ended on '\n'
      let output = '';
      for (const line of lines) output += processLine(line);
      return output || null;
    },
    flush() {
      // Drain any final newline-less event left in the buffer.
      const rest = state.buffer;
      state.buffer = '';
      return rest ? processLine(rest) : '';
    },
  };
}
|
|
283
|
+
|
|
284
|
+
module.exports = { convertRequest, convertResponse, createSSEConverter };
|
package/lib/detector.js
CHANGED
|
@@ -17,6 +17,10 @@ function detectInboundProtocol(req, body) {
|
|
|
17
17
|
|
|
18
18
|
// 根据 body 结构推断
|
|
19
19
|
if (body && typeof body === 'object') {
|
|
20
|
+
// Gemini: contents 数组且每个元素有 parts
|
|
21
|
+
if (Array.isArray(body.contents) && body.contents[0]?.parts) {
|
|
22
|
+
return 'gemini';
|
|
23
|
+
}
|
|
20
24
|
// Anthropic: 有 system 顶级字段,messages 中角色没有 system
|
|
21
25
|
if (body.system !== undefined && Array.isArray(body.messages)) {
|
|
22
26
|
return 'anthropic';
|
package/lib/proxy-server.js
CHANGED
|
@@ -2,6 +2,10 @@ const express = require('express');
|
|
|
2
2
|
const { detectInboundProtocol } = require('./detector');
|
|
3
3
|
const o2a = require('./converters/openai-to-anthropic');
|
|
4
4
|
const a2o = require('./converters/anthropic-to-openai');
|
|
5
|
+
const o2g = require('./converters/openai-to-gemini');
|
|
6
|
+
const g2o = require('./converters/gemini-to-openai');
|
|
7
|
+
const a2g = require('./converters/anthropic-to-gemini');
|
|
8
|
+
const g2a = require('./converters/gemini-to-anthropic');
|
|
5
9
|
const { recordUsage } = require('./stats-store');
|
|
6
10
|
|
|
7
11
|
function createProxyApp(proxyConfigOrGetter) {
|
|
@@ -137,7 +141,7 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
137
141
|
console.log(`[${requestId}] ⬅️ ${(inboundProtocol || 'unknown').toUpperCase()} → ${targetProtocol.toUpperCase()} | path=${req.path}`);
|
|
138
142
|
|
|
139
143
|
// 决定转换方向
|
|
140
|
-
let convertReq, convertRes, createSSEConv;
|
|
144
|
+
let convertReq, convertRes, createSSEConv, nameToId = null;
|
|
141
145
|
if (inboundProtocol === 'openai' && targetProtocol === 'anthropic') {
|
|
142
146
|
convertReq = o2a.convertRequest;
|
|
143
147
|
convertRes = o2a.convertResponse;
|
|
@@ -146,6 +150,28 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
146
150
|
convertReq = a2o.convertRequest;
|
|
147
151
|
convertRes = a2o.convertResponse;
|
|
148
152
|
createSSEConv = a2o.createSSEConverter;
|
|
153
|
+
} else if (inboundProtocol === 'openai' && targetProtocol === 'gemini') {
|
|
154
|
+
convertReq = o2g.convertRequest;
|
|
155
|
+
convertRes = o2g.convertResponse;
|
|
156
|
+
createSSEConv = o2g.createSSEConverter;
|
|
157
|
+
} else if (inboundProtocol === 'gemini' && targetProtocol === 'openai') {
|
|
158
|
+
convertReq = g2o.convertRequest;
|
|
159
|
+
convertRes = g2o.convertResponse;
|
|
160
|
+
createSSEConv = g2o.createSSEConverter;
|
|
161
|
+
} else if (inboundProtocol === 'anthropic' && targetProtocol === 'gemini') {
|
|
162
|
+
convertReq = a2g.convertRequest;
|
|
163
|
+
convertRes = a2g.convertResponse;
|
|
164
|
+
createSSEConv = a2g.createSSEConverter;
|
|
165
|
+
} else if (inboundProtocol === 'gemini' && targetProtocol === 'anthropic') {
|
|
166
|
+
// g2a.convertRequest 返回 { ...body, nameToId },需要提取映射
|
|
167
|
+
convertReq = (body, model) => {
|
|
168
|
+
const result = g2a.convertRequest(body, model);
|
|
169
|
+
nameToId = result.nameToId;
|
|
170
|
+
const { nameToId: _, ...bodyOnly } = result;
|
|
171
|
+
return bodyOnly;
|
|
172
|
+
};
|
|
173
|
+
convertRes = g2a.convertResponse;
|
|
174
|
+
createSSEConv = (model) => g2a.createSSEConverter(nameToId);
|
|
149
175
|
} else {
|
|
150
176
|
convertReq = (body, model) => ({ ...body, model: body.model || model });
|
|
151
177
|
convertRes = (body) => body;
|
|
@@ -161,7 +187,7 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
161
187
|
|
|
162
188
|
const targetBody = convertReq(req.body, effectiveModel);
|
|
163
189
|
|
|
164
|
-
const isAzure = !!target.azureDeployment;
|
|
190
|
+
const isAzure = !!target.azureDeployment && /azure/i.test(target.providerUrl);
|
|
165
191
|
|
|
166
192
|
// 流式请求时注入 stream_options 以获取 usage 统计(Azure 不支持)
|
|
167
193
|
if (isStream && targetProtocol === 'openai' && !isAzure) {
|
|
@@ -172,7 +198,7 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
172
198
|
injectReasoningToMessages(targetBody.messages);
|
|
173
199
|
|
|
174
200
|
// 构建目标 URL
|
|
175
|
-
const targetUrl = buildTargetUrl(target, req.path);
|
|
201
|
+
const targetUrl = buildTargetUrl(target, req.path, isStream, effectiveModel);
|
|
176
202
|
console.log(`[${requestId}] 🔗 ${targetUrl} | model=${effectiveModel}`);
|
|
177
203
|
|
|
178
204
|
// 构建请求头
|
|
@@ -187,6 +213,8 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
187
213
|
} else {
|
|
188
214
|
headers['Authorization'] = `Bearer ${target.apiKey}`;
|
|
189
215
|
}
|
|
216
|
+
} else if (targetProtocol === 'gemini') {
|
|
217
|
+
headers['x-goog-api-key'] = target.apiKey;
|
|
190
218
|
} else if (targetProtocol === 'anthropic') {
|
|
191
219
|
headers['X-Api-Key'] = target.apiKey;
|
|
192
220
|
headers['Anthropic-Version'] = '2023-06-01';
|
|
@@ -197,7 +225,7 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
197
225
|
method: 'POST',
|
|
198
226
|
headers,
|
|
199
227
|
body: JSON.stringify(targetBody),
|
|
200
|
-
signal: AbortSignal.timeout(
|
|
228
|
+
signal: AbortSignal.timeout(300000),
|
|
201
229
|
});
|
|
202
230
|
|
|
203
231
|
if (!fetchRes.ok) {
|
|
@@ -272,6 +300,11 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
272
300
|
}
|
|
273
301
|
} catch (err) {
|
|
274
302
|
console.error(`[${requestId}] Stream error:`, err.message);
|
|
303
|
+
if (!res.writableEnded) {
|
|
304
|
+
try {
|
|
305
|
+
res.write(`data: ${JSON.stringify({ error: { message: err.message, type: 'proxy_error' } })}\n\n`);
|
|
306
|
+
} catch { /* ignore */ }
|
|
307
|
+
}
|
|
275
308
|
} finally {
|
|
276
309
|
res.end();
|
|
277
310
|
}
|
|
@@ -293,7 +326,7 @@ function createProxyApp(proxyConfigOrGetter) {
|
|
|
293
326
|
return app;
|
|
294
327
|
}
|
|
295
328
|
|
|
296
|
-
function buildTargetUrl(target, originalPath) {
|
|
329
|
+
function buildTargetUrl(target, originalPath, isStream, effectiveModel) {
|
|
297
330
|
const base = target.providerUrl.replace(/\/$/, '');
|
|
298
331
|
const hasV1Suffix = base.endsWith('/v1');
|
|
299
332
|
|
|
@@ -312,6 +345,12 @@ function buildTargetUrl(target, originalPath) {
|
|
|
312
345
|
return `${base}/v1/messages`;
|
|
313
346
|
}
|
|
314
347
|
|
|
348
|
+
if (target.protocol === 'gemini') {
|
|
349
|
+
const model = effectiveModel || 'gemini-pro';
|
|
350
|
+
const action = isStream ? 'streamGenerateContent?alt=sse' : 'generateContent';
|
|
351
|
+
return `${base}/v1beta/models/${model}:${action}`;
|
|
352
|
+
}
|
|
353
|
+
|
|
315
354
|
return base + originalPath;
|
|
316
355
|
}
|
|
317
356
|
|
package/lib/stats-store.js
CHANGED
|
@@ -225,23 +225,23 @@ function getStats(opts = {}) {
|
|
|
225
225
|
for (const [key, val] of Object.entries(bucket)) {
|
|
226
226
|
if (!matchPrefix(key, proxyId)) continue;
|
|
227
227
|
|
|
228
|
-
summary.prompt += val.prompt;
|
|
229
|
-
summary.completion += val.completion;
|
|
230
|
-
summary.requests += val.requests;
|
|
231
|
-
if (val.estimated) summary.estimatedCount += val.requests;
|
|
232
|
-
|
|
233
228
|
const parts = key.split(':');
|
|
234
|
-
|
|
229
|
+
// 只从 model 层(parts.length >= 4)聚合,避免三层重复计数
|
|
230
|
+
if (parts.length >= 4) {
|
|
235
231
|
const prov = parts[2];
|
|
232
|
+
const mdl = parts.slice(3).join(':');
|
|
233
|
+
|
|
234
|
+
summary.prompt += val.prompt;
|
|
235
|
+
summary.completion += val.completion;
|
|
236
|
+
summary.requests += val.requests;
|
|
237
|
+
if (val.estimated) summary.estimatedCount += val.requests;
|
|
238
|
+
|
|
236
239
|
if (!byProvider[prov]) byProvider[prov] = { prompt: 0, completion: 0, requests: 0, estimatedCount: 0 };
|
|
237
240
|
byProvider[prov].prompt += val.prompt;
|
|
238
241
|
byProvider[prov].completion += val.completion;
|
|
239
242
|
byProvider[prov].requests += val.requests;
|
|
240
243
|
if (val.estimated) byProvider[prov].estimatedCount += val.requests;
|
|
241
|
-
|
|
242
|
-
if (parts.length >= 4) {
|
|
243
|
-
const prov = parts[2];
|
|
244
|
-
const mdl = parts.slice(3).join(':');
|
|
244
|
+
|
|
245
245
|
const mk = prov + '/' + mdl;
|
|
246
246
|
if (!byModel[mk]) byModel[mk] = { provider: prov, model: mdl, prompt: 0, completion: 0, requests: 0, estimatedCount: 0 };
|
|
247
247
|
byModel[mk].prompt += val.prompt;
|
|
@@ -279,9 +279,7 @@ function matchPrefix(key, proxyId) {
|
|
|
279
279
|
return key.length === prefix.length || key[prefix.length] === ':';
|
|
280
280
|
}
|
|
281
281
|
|
|
282
|
-
//
|
|
282
|
+
// 进程退出时刷盘(信号处理统一由 server.js 管理)
|
|
283
283
|
process.on('exit', flush);
|
|
284
|
-
process.on('SIGINT', () => { flush(); process.exit(0); });
|
|
285
|
-
process.on('SIGTERM', () => { flush(); process.exit(0); });
|
|
286
284
|
|
|
287
285
|
module.exports = { recordUsage, getStats, flush };
|