protocol-proxy 2.3.4 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/config-store.js +295 -225
- package/lib/converters/gemini-to-anthropic.js +286 -277
- package/lib/converters/gemini-to-openai.js +255 -240
- package/lib/converters/openai-to-anthropic.js +368 -329
- package/lib/logger.js +58 -0
- package/lib/proxy-manager.js +4 -0
- package/lib/proxy-server.js +636 -357
- package/lib/stats-store.js +3 -5
- package/package.json +51 -51
- package/public/app.js +1296 -972
- package/public/index.html +321 -277
- package/public/style.css +1448 -1189
- package/server.js +767 -655
|
@@ -1,240 +1,255 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Gemini → OpenAI 协议转换
|
|
3
|
-
*/
|
|
4
|
-
|
|
5
|
-
const { encodeOpenAIEvent, encodeOpenAIDone } = require('./sse-helpers');
|
|
6
|
-
|
|
7
|
-
function generateCallId() {
|
|
8
|
-
return 'call_' + Math.random().toString(36).slice(2, 14);
|
|
9
|
-
}
|
|
10
|
-
|
|
11
|
-
// ==================== 请求转换 ====================
|
|
12
|
-
|
|
13
|
-
function convertRequest(body, targetModel) {
|
|
14
|
-
const messages = [];
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
const
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
const
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
const
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
const
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
}
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
1
|
+
/**
|
|
2
|
+
* Gemini → OpenAI 协议转换
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
const { encodeOpenAIEvent, encodeOpenAIDone } = require('./sse-helpers');
|
|
6
|
+
|
|
7
|
+
/**
 * Produce a pseudo-random OpenAI-style tool-call id (e.g. "call_k3j9x1q…").
 * Not cryptographically secure — used only as a correlation token between
 * converted tool calls and their tool results.
 */
function generateCallId() {
  const suffix = Math.random().toString(36).slice(2, 14);
  return `call_${suffix}`;
}
|
|
10
|
+
|
|
11
|
+
// ==================== 请求转换 ====================
|
|
12
|
+
|
|
13
|
+
/**
 * Convert a Gemini generateContent request body into an OpenAI
 * chat-completions request.
 *
 * @param {object} body - Gemini request (contents, systemInstruction, tools,
 *   generationConfig). Gemini REST clients may send camelCase or snake_case
 *   top-level keys, so both `systemInstruction`/`system_instruction` and
 *   `generationConfig`/`generation_config` are accepted.
 * @param {string} targetModel - Model name to place in the OpenAI request.
 * @returns {object} OpenAI request fields plus a `nameToId` Map letting the
 *   caller correlate Gemini function names back to generated tool_call ids.
 *   NOTE(review): callers presumably strip `nameToId` before serializing —
 *   a Map stringifies to `{}` — confirm at call sites.
 */
function convertRequest(body, targetModel) {
  const messages = [];
  // Gemini function name + occurrence index → generated tool_call id, so a
  // later functionResponse can reference the matching OpenAI tool_call_id.
  const nameToId = new Map();
  const nameCount = new Map();

  // system instruction → system message (camelCase or snake_case key).
  const sysInstruction = body.systemInstruction ?? body.system_instruction;
  const sysText = sysInstruction?.parts?.map(p => p.text || '').join('') || '';
  if (sysText) {
    messages.push({ role: 'system', content: sysText });
  }

  // tools: functionDeclarations → OpenAI tools
  let tools = undefined;
  if (body.tools && Array.isArray(body.tools)) {
    const allDeclarations = [];
    for (const tool of body.tools) {
      if (tool.functionDeclarations) {
        allDeclarations.push(...tool.functionDeclarations);
      }
    }
    if (allDeclarations.length > 0) {
      tools = allDeclarations.map(fd => ({
        type: 'function',
        function: {
          name: fd.name,
          description: fd.description || '',
          parameters: fd.parameters || { type: 'object', properties: {} },
        },
      }));
    }
  }

  // contents → messages; functionCall/functionResponse → tool_calls / tool results
  for (const msg of (body.contents || [])) {
    const role = msg.role === 'model' ? 'assistant' : 'user';
    const parts = msg.parts || [];

    // Model turn containing function calls → assistant message with tool_calls.
    const functionCalls = parts.filter(p => p.functionCall);
    if (functionCalls.length > 0) {
      const text = parts.filter(p => p.text).map(p => p.text).join('');
      const tool_calls = functionCalls.map(fc => {
        const fnName = fc.functionCall.name || 'unknown';
        const count = nameCount.get(fnName) || 0;
        nameCount.set(fnName, count + 1);
        const callId = generateCallId();
        // Key by name + occurrence so repeated calls to the same function
        // map to distinct ids.
        nameToId.set(fnName + '#' + count, callId);
        return {
          id: callId,
          type: 'function',
          function: { name: fnName, arguments: JSON.stringify(fc.functionCall.args || {}) },
        };
      });
      messages.push({ role: 'assistant', content: text || null, tool_calls });
      continue;
    }

    // User turn carrying function responses → one `tool` message each.
    const functionResponses = parts.filter(p => p.functionResponse);
    const respCount = new Map();
    for (const fr of functionResponses) {
      const fnName = fr.functionResponse.name || 'unknown';
      const count = respCount.get(fnName) || 0;
      respCount.set(fnName, count + 1);
      // Fall back to the bare function name when no id was recorded.
      const toolCallId = nameToId.get(fnName + '#' + count) || fnName;
      messages.push({
        role: 'tool',
        tool_call_id: toolCallId,
        content: typeof fr.functionResponse.response === 'string'
          ? fr.functionResponse.response
          : JSON.stringify(fr.functionResponse.response || {}),
      });
    }

    // Plain-text message (only when the turn had no function activity).
    const textParts = parts.filter(p => p.text).map(p => p.text).join('');
    if (textParts && functionCalls.length === 0 && functionResponses.length === 0) {
      messages.push({ role, content: textParts });
    }
  }

  const result = {
    model: targetModel,
    messages,
    stream: false,
  };

  if (tools) result.tools = tools;

  // generationConfig → OpenAI sampling params (camelCase or snake_case key).
  const gc = body.generationConfig ?? body.generation_config ?? {};
  if (gc.maxOutputTokens !== undefined) result.max_tokens = gc.maxOutputTokens;
  if (gc.temperature !== undefined) result.temperature = gc.temperature;
  if (gc.topP !== undefined) result.top_p = gc.topP;
  if (gc.stopSequences) result.stop = gc.stopSequences;

  return { ...result, nameToId };
}
|
|
112
|
+
|
|
113
|
+
// ==================== 响应转换 ====================
|
|
114
|
+
|
|
115
|
+
/**
 * Convert a non-streaming Gemini generateContent response into an
 * OpenAI chat.completion payload. Only the first candidate is used;
 * if none is present, an empty choices list is returned with usage.
 */
function convertResponse(geminiBody) {
  const candidate = geminiBody.candidates?.[0];
  const usage = convertUsage(geminiBody.usageMetadata);

  if (!candidate) {
    return { id: '', object: 'chat.completion', choices: [], usage };
  }

  const textChunks = [];
  const toolCalls = [];
  for (const part of candidate.content?.parts || []) {
    if (part.text) textChunks.push(part.text);
    if (part.functionCall) {
      toolCalls.push({
        id: generateCallId(),
        type: 'function',
        function: {
          name: part.functionCall.name,
          arguments: JSON.stringify(part.functionCall.args || {}),
        },
      });
    }
  }

  const hasToolCalls = toolCalls.length > 0;
  const message = { role: 'assistant', content: textChunks.join('') || null };
  if (hasToolCalls) message.tool_calls = toolCalls;

  // OpenAI semantics: tool calls force finish_reason 'tool_calls'.
  const finish_reason = hasToolCalls
    ? 'tool_calls'
    : mapFinishReason(candidate.finishReason);

  return {
    id: '',
    object: 'chat.completion',
    choices: [{ index: 0, message, finish_reason }],
    usage,
  };
}
|
|
150
|
+
|
|
151
|
+
/**
 * Map Gemini usageMetadata onto OpenAI usage fields.
 * Missing or partial metadata yields zeroed counts.
 */
function convertUsage(meta) {
  const prompt_tokens = meta?.promptTokenCount || 0;
  const completion_tokens = meta?.candidatesTokenCount || 0;
  const total_tokens = meta?.totalTokenCount || 0;
  return { prompt_tokens, completion_tokens, total_tokens };
}
|
|
158
|
+
|
|
159
|
+
/**
 * Translate a Gemini finishReason into its OpenAI equivalent.
 * Absent reason → null; unrecognized reasons default to 'stop'.
 */
function mapFinishReason(reason) {
  if (!reason) return null;
  switch (reason) {
    case 'MAX_TOKENS':
      return 'length';
    case 'SAFETY':
      return 'content_filter';
    case 'STOP':
    default:
      return 'stop';
  }
}
|
|
166
|
+
|
|
167
|
+
// ==================== SSE 流式转换 ====================
|
|
168
|
+
|
|
169
|
+
/**
 * Create a stateful converter that turns a Gemini SSE stream into
 * OpenAI chat.completion.chunk SSE events.
 *
 * Returned object:
 *   convertChunk(chunkText) → encoded OpenAI events for the complete SSE
 *     lines in this chunk, or null when nothing was produced.
 *   flush() → events for any buffered trailing line ('' if none).
 *
 * Fix: flush() previously returned '' unconditionally, silently dropping a
 * final `data:` line when the upstream stream ended without a trailing
 * newline; it now drains the remaining buffer through the same parser.
 */
function createSSEConverter() {
  const state = { started: false, sentFunctionCall: new Map(), buffer: '' };

  // Parse one raw SSE line and return the OpenAI-encoded output it yields.
  function processLine(line) {
    let output = '';
    const trimmed = line.trim();
    if (!trimmed.startsWith('data: ')) return output;
    const dataStr = trimmed.slice(6);
    if (!dataStr) return output;

    let chunk;
    // Malformed JSON lines are skipped best-effort, matching prior behavior.
    try { chunk = JSON.parse(dataStr); } catch { return output; }

    const candidate = chunk.candidates?.[0];
    if (!candidate) return output;

    const parts = candidate.content?.parts || [];

    // First chunk with content: emit the assistant role delta exactly once.
    if (!state.started && parts.length > 0) {
      state.started = true;
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: { role: 'assistant', content: null }, finish_reason: null }],
      });
    }

    // Text delta.
    const text = parts.filter(p => p.text).map(p => p.text).join('') || '';
    if (text) {
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: { content: text }, finish_reason: null }],
      });
    }

    // functionCall deltas — dedupe identical name+args pairs across chunks,
    // generating the call id on first sight and caching it.
    for (const part of parts) {
      if (!part.functionCall) continue;
      const argsKey = typeof part.functionCall.args === 'string'
        ? part.functionCall.args
        : JSON.stringify(part.functionCall.args || {});
      const key = part.functionCall.name + argsKey;
      if (state.sentFunctionCall.has(key)) continue;
      const callId = generateCallId();
      state.sentFunctionCall.set(key, callId);
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{
          index: 0,
          delta: {
            tool_calls: [{
              index: 0,
              id: callId,
              type: 'function',
              function: { name: part.functionCall.name, arguments: JSON.stringify(part.functionCall.args || {}) },
            }],
          },
          finish_reason: null,
        }],
      });
    }

    // Terminal chunk: emit finish_reason delta followed by [DONE].
    if (candidate.finishReason) {
      const reason = mapFinishReason(candidate.finishReason);
      output += encodeOpenAIEvent({
        id: '',
        object: 'chat.completion.chunk',
        choices: [{ index: 0, delta: {}, finish_reason: reason }],
      });
      output += encodeOpenAIDone();
    }

    return output;
  }

  return {
    convertChunk(chunkText) {
      state.buffer += chunkText;
      // Keep the (possibly partial) final line buffered for the next chunk.
      const lines = state.buffer.split('\n');
      state.buffer = lines.pop() || '';

      let output = '';
      for (const line of lines) {
        output += processLine(line);
      }
      return output || null;
    },
    flush() {
      const remainder = state.buffer;
      state.buffer = '';
      return remainder ? processLine(remainder) : '';
    },
  };
}
|
|
254
|
+
|
|
255
|
+
// Public API: non-streaming request/response converters plus the streaming SSE converter factory.
module.exports = { convertRequest, convertResponse, createSSEConverter };
|