open-sse 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +180 -0
- package/config/constants.js +206 -0
- package/config/defaultThinkingSignature.js +7 -0
- package/config/ollamaModels.js +19 -0
- package/config/providerModels.js +161 -0
- package/handlers/chatCore.js +277 -0
- package/handlers/responsesHandler.js +69 -0
- package/index.js +69 -0
- package/package.json +44 -0
- package/services/accountFallback.js +148 -0
- package/services/combo.js +69 -0
- package/services/compact.js +64 -0
- package/services/model.js +109 -0
- package/services/provider.js +237 -0
- package/services/tokenRefresh.js +542 -0
- package/services/usage.js +398 -0
- package/translator/formats.js +12 -0
- package/translator/from-openai/claude.js +341 -0
- package/translator/from-openai/gemini.js +469 -0
- package/translator/from-openai/openai-responses.js +361 -0
- package/translator/helpers/claudeHelper.js +179 -0
- package/translator/helpers/geminiHelper.js +131 -0
- package/translator/helpers/openaiHelper.js +80 -0
- package/translator/helpers/responsesApiHelper.js +103 -0
- package/translator/helpers/toolCallHelper.js +111 -0
- package/translator/index.js +167 -0
- package/translator/to-openai/claude.js +238 -0
- package/translator/to-openai/gemini.js +151 -0
- package/translator/to-openai/openai-responses.js +140 -0
- package/translator/to-openai/openai.js +371 -0
- package/utils/bypassHandler.js +258 -0
- package/utils/error.js +133 -0
- package/utils/ollamaTransform.js +82 -0
- package/utils/requestLogger.js +217 -0
- package/utils/stream.js +274 -0
- package/utils/streamHandler.js +131 -0
package/translator/from-openai/gemini.js
@@ -0,0 +1,469 @@
+import { register } from "../index.js";
+import { FORMATS } from "../formats.js";
+import { DEFAULT_THINKING_GEMINI_SIGNATURE } from "../../config/defaultThinkingSignature.js";
+import {
+  UNSUPPORTED_SCHEMA_CONSTRAINTS,
+  DEFAULT_SAFETY_SETTINGS,
+  convertOpenAIContentToParts,
+  extractTextContent,
+  tryParseJSON,
+  generateRequestId,
+  generateSessionId,
+  generateProjectId,
+  cleanJSONSchemaForAntigravity
+} from "../helpers/geminiHelper.js";
+
+// ============================================
+// REQUEST TRANSLATORS: OpenAI -> Gemini/GeminiCLI/Antigravity
+// ============================================
+
+// Core: Convert OpenAI request to Gemini format (base for all variants)
+function openaiToGeminiBase(model, body, stream) {
+  const result = {
+    model: model,
+    contents: [],
+    generationConfig: {},
+    safetySettings: DEFAULT_SAFETY_SETTINGS
+  };
+
+  // Generation config
+  if (body.temperature !== undefined) {
+    result.generationConfig.temperature = body.temperature;
+  }
+  if (body.top_p !== undefined) {
+    result.generationConfig.topP = body.top_p;
+  }
+  if (body.top_k !== undefined) {
+    result.generationConfig.topK = body.top_k;
+  }
+  if (body.max_tokens !== undefined) {
+    result.generationConfig.maxOutputTokens = body.max_tokens;
+  }
+
+  // Build tool_call_id -> name map
+  const tcID2Name = {};
+  if (body.messages && Array.isArray(body.messages)) {
+    for (const msg of body.messages) {
+      if (msg.role === "assistant" && msg.tool_calls) {
+        for (const tc of msg.tool_calls) {
+          if (tc.type === "function" && tc.id && tc.function?.name) {
+            tcID2Name[tc.id] = tc.function.name;
+          }
+        }
+      }
+    }
+  }
+
+  // Build tool responses cache
+  const toolResponses = {};
+  if (body.messages && Array.isArray(body.messages)) {
+    for (const msg of body.messages) {
+      if (msg.role === "tool" && msg.tool_call_id) {
+        toolResponses[msg.tool_call_id] = msg.content;
+      }
+    }
+  }
+
+  // Convert messages
+  if (body.messages && Array.isArray(body.messages)) {
+    for (let i = 0; i < body.messages.length; i++) {
+      const msg = body.messages[i];
+      const role = msg.role;
+      const content = msg.content;
+
+      if (role === "system" && body.messages.length > 1) {
+        result.systemInstruction = {
+          role: "user",
+          parts: [{ text: typeof content === "string" ? content : extractTextContent(content) }]
+        };
+      } else if (role === "user" || (role === "system" && body.messages.length === 1)) {
+        const parts = convertOpenAIContentToParts(content);
+        if (parts.length > 0) {
+          result.contents.push({ role: "user", parts });
+        }
+      } else if (role === "assistant") {
+        const parts = [];
+
+        if (content) {
+          const text = typeof content === "string" ? content : extractTextContent(content);
+          if (text) {
+            parts.push({ text });
+          }
+        }
+
+        if (msg.tool_calls && Array.isArray(msg.tool_calls)) {
+          const toolCallIds = [];
+          for (const tc of msg.tool_calls) {
+            if (tc.type !== "function") continue;
+
+            const args = tryParseJSON(tc.function?.arguments || "{}");
+            parts.push({
+              thoughtSignature: DEFAULT_THINKING_GEMINI_SIGNATURE,
+              functionCall: {
+                id: tc.id,
+                name: tc.function.name,
+                args: args
+              }
+            });
+            toolCallIds.push(tc.id);
+          }
+
+          if (parts.length > 0) {
+            result.contents.push({ role: "model", parts });
+          }
+
+          // Append function responses - extract name from tool_call_id format "ToolName-timestamp-index"
+          const toolParts = [];
+          for (const fid of toolCallIds) {
+            // Try to get name from tcID2Name map first, then extract from id format
+            let name = tcID2Name[fid];
+            if (!name) {
+              // Extract name from id format: "ToolName-timestamp-index"
+              const idParts = fid.split("-");
+              if (idParts.length > 2) {
+                name = idParts.slice(0, -2).join("-");
+              } else {
+                name = fid;
+              }
+            }
+
+            let resp = toolResponses[fid] || "{}";
+            let parsedResp = tryParseJSON(resp);
+            if (parsedResp === null) {
+              parsedResp = { result: resp };
+            } else if (typeof parsedResp !== "object") {
+              parsedResp = { result: parsedResp };
+            }
+
+            toolParts.push({
+              functionResponse: {
+                id: fid,
+                name: name,
+                response: { result: parsedResp }
+              }
+            });
+          }
+          if (toolParts.length > 0) {
+            result.contents.push({ role: "user", parts: toolParts });
+          }
+        } else if (parts.length > 0) {
+          result.contents.push({ role: "model", parts });
+        }
+      }
+    }
+  }
+
+  // Convert tools
+  if (body.tools && Array.isArray(body.tools) && body.tools.length > 0) {
+    const functionDeclarations = [];
+    for (const t of body.tools) {
+      if (t.type === "function" && t.function) {
+        const fn = t.function;
+        functionDeclarations.push({
+          name: fn.name,
+          description: fn.description || "",
+          parameters: fn.parameters || { type: "object", properties: {} }
+        });
+      }
+    }
+
+    if (functionDeclarations.length > 0) {
+      result.tools = [{ functionDeclarations }];
+    }
+  }
+
+  return result;
+}
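To make the mapping concrete, here is an illustrative sketch (not part of the package) of the shapes openaiToGeminiBase appears to work with. The tool name and id values are hypothetical, chosen to match the "ToolName-timestamp-index" convention the code parses, and it is assumed that convertOpenAIContentToParts turns a plain string into a single text part.

```js
// Hypothetical OpenAI-style body (names and values invented for illustration)
const body = {
  temperature: 0.7,
  max_tokens: 1024,
  messages: [
    { role: "system", content: "Be terse." },
    { role: "user", content: "Weather in Oslo?" },
    {
      role: "assistant",
      content: null,
      tool_calls: [{
        id: "get_weather-1700000000000-0",   // "ToolName-timestamp-index"
        type: "function",
        function: { name: "get_weather", arguments: "{\"city\":\"Oslo\"}" }
      }]
    },
    { role: "tool", tool_call_id: "get_weather-1700000000000-0", content: "{\"temp_c\":4}" }
  ],
  tools: [{
    type: "function",
    function: { name: "get_weather", parameters: { type: "object", properties: { city: { type: "string" } } } }
  }]
};

// Expected result, reading the function above:
// - systemInstruction: { role: "user", parts: [{ text: "Be terse." }] }
// - contents[0]: { role: "user", parts: [{ text: "Weather in Oslo?" }] }
// - contents[1]: { role: "model", parts: [{ thoughtSignature: DEFAULT_THINKING_GEMINI_SIGNATURE,
//                  functionCall: { id, name: "get_weather", args: { city: "Oslo" } } }] }
// - contents[2]: { role: "user", parts: [{ functionResponse: { id, name: "get_weather",
//                  response: { result: { temp_c: 4 } } } }] }
// - generationConfig: { temperature: 0.7, maxOutputTokens: 1024 }
// - tools: [{ functionDeclarations: [{ name: "get_weather", description: "", parameters: { ... } }] }]
```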
+
+// OpenAI -> Gemini (standard API)
+function openaiToGemini(model, body, stream) {
+  return openaiToGeminiBase(model, body, stream);
+}
+
+// OpenAI -> Gemini CLI (Cloud Code Assist)
+function openaiToGeminiCLI(model, body, stream) {
+  const gemini = openaiToGeminiBase(model, body, stream);
+  const isClaude = model.toLowerCase().includes("claude");
+
+  // Add thinking config for CLI
+  if (body.reasoning_effort) {
+    const budgetMap = { low: 1024, medium: 8192, high: 32768 };
+    const budget = budgetMap[body.reasoning_effort] || 8192;
+    gemini.generationConfig.thinkingConfig = {
+      thinkingBudget: budget,
+      include_thoughts: true
+    };
+  }
+
+  // Thinking config from Claude format
+  if (body.thinking?.type === "enabled" && body.thinking.budget_tokens) {
+    gemini.generationConfig.thinkingConfig = {
+      thinkingBudget: body.thinking.budget_tokens,
+      include_thoughts: true
+    };
+  }
+
+  // Clean schema for tools
+  // Claude models: use "parameters" (backend converts parametersJsonSchema -> parameters)
+  // Gemini native: use "parametersJsonSchema" (backend expects this field)
+  if (gemini.tools?.[0]?.functionDeclarations) {
+    for (const fn of gemini.tools[0].functionDeclarations) {
+      if (fn.parameters) {
+        const cleanedSchema = cleanJSONSchemaForAntigravity(fn.parameters);
+        if (isClaude) {
+          fn.parameters = cleanedSchema;
+        } else {
+          fn.parametersJsonSchema = cleanedSchema;
+          delete fn.parameters;
+        }
+      }
+    }
+  }
+
+  return gemini;
+}
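The thinking-budget selection above can be summarized with a small standalone sketch (not from the package; the helper name pickThinkingBudget is invented). Note that when both reasoning_effort and a Claude-style thinking block are present, the thinking block wins simply because it is applied last.

```js
// Sketch only: mirrors the two branches in openaiToGeminiCLI that set thinkingConfig.
function pickThinkingBudget(body) {
  // Claude-style request takes precedence (it is applied after reasoning_effort above)
  if (body.thinking?.type === "enabled" && body.thinking.budget_tokens) {
    return body.thinking.budget_tokens;
  }
  if (body.reasoning_effort) {
    const budgetMap = { low: 1024, medium: 8192, high: 32768 };
    return budgetMap[body.reasoning_effort] || 8192;   // unknown effort falls back to 8192
  }
  return undefined;                                    // no thinkingConfig is added
}

console.log(pickThinkingBudget({ reasoning_effort: "high" }));                            // 32768
console.log(pickThinkingBudget({ thinking: { type: "enabled", budget_tokens: 4096 } }));  // 4096
```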
+
+// Wrap Gemini CLI format in Cloud Code wrapper
+function wrapInCloudCodeEnvelope(model, geminiCLI, credentials = null) {
+  // Use real project ID if available, otherwise generate random
+  const projectId = credentials?.projectId || generateProjectId();
+
+  return {
+    project: projectId,
+    model: model,
+    userAgent: "gemini-cli",
+    requestId: generateRequestId(),
+    request: {
+      sessionId: generateSessionId(),
+      contents: geminiCLI.contents,
+      systemInstruction: geminiCLI.systemInstruction,
+      generationConfig: geminiCLI.generationConfig,
+      safetySettings: geminiCLI.safetySettings,
+      tools: geminiCLI.tools,
+    }
+  };
+}
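For orientation, the envelope comes out roughly as sketched below (placeholder values only; the real project, requestId, and sessionId values come from generateProjectId, generateRequestId, and generateSessionId in geminiHelper.js, which is not shown in this hunk).

```js
// Illustrative shape only, not real output
const envelope = {
  project: "example-project-id",        // credentials?.projectId when supplied, otherwise generated
  model: "gemini-2.5-pro",              // hypothetical model name
  userAgent: "gemini-cli",
  requestId: "example-request-id",
  request: {
    sessionId: "example-session-id",
    contents: [ /* translated Gemini contents */ ],
    systemInstruction: undefined,       // only set when a system message was present
    generationConfig: { temperature: 0.7 },
    safetySettings: [ /* DEFAULT_SAFETY_SETTINGS */ ],
    tools: undefined                    // only set when OpenAI tools were supplied
  }
};
```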
+
+// OpenAI -> Antigravity (Sandbox Cloud Code with wrapper)
+function openaiToAntigravity(model, body, stream, credentials = null) {
+  const geminiCLI = openaiToGeminiCLI(model, body, stream);
+  return wrapInCloudCodeEnvelope(model, geminiCLI, credentials);
+}
+
+// ============================================
+// RESPONSE TRANSLATORS: Gemini/GeminiCLI/Antigravity -> OpenAI
+// ============================================
+
+// Core: Convert Gemini response chunk to OpenAI format
+function geminiToOpenAIResponse(chunk, state) {
+  if (!chunk) return null;
+
+  // Handle Antigravity wrapper
+  const response = chunk.response || chunk;
+  if (!response || !response.candidates?.[0]) return null;
+
+  const results = [];
+  const candidate = response.candidates[0];
+  const content = candidate.content;
+
+  // Initialize state
+  if (!state.messageId) {
+    state.messageId = response.responseId || `msg_${Date.now()}`;
+    state.model = response.modelVersion || "gemini";
+    state.functionIndex = 0;
+    results.push({
+      id: `chatcmpl-${state.messageId}`,
+      object: "chat.completion.chunk",
+      created: Math.floor(Date.now() / 1000),
+      model: state.model,
+      choices: [{
+        index: 0,
+        delta: { role: "assistant" },
+        finish_reason: null
+      }]
+    });
+  }
+
+  // Process parts
+  if (content?.parts) {
+    for (const part of content.parts) {
+      const hasThoughtSig = part.thoughtSignature || part.thought_signature;
+      const isThought = part.thought === true;
+
+      // Handle thought signature (thinking mode)
+      if (hasThoughtSig) {
+        const hasTextContent = part.text !== undefined && part.text !== "";
+        const hasFunctionCall = !!part.functionCall;
+        // If there's text with thoughtSignature
+        if (hasTextContent) {
+          results.push({
+            id: `chatcmpl-${state.messageId}`,
+            object: "chat.completion.chunk",
+            created: Math.floor(Date.now() / 1000),
+            model: state.model,
+            choices: [{
+              index: 0,
+              delta: isThought
+                ? { reasoning_content: part.text }
+                : { content: part.text },
+              finish_reason: null
+            }]
+          });
+        }
+
+        // Process functionCall if exists, then skip to next part
+        if (hasFunctionCall) {
+          const fcName = part.functionCall.name;
+          const fcArgs = part.functionCall.args || {};
+          const toolCallIndex = state.functionIndex++;
+
+          const toolCall = {
+            id: `${fcName}-${Date.now()}-${toolCallIndex}`,
+            index: toolCallIndex,
+            type: "function",
+            function: {
+              name: fcName,
+              arguments: JSON.stringify(fcArgs)
+            }
+          };
+
+          state.toolCalls.set(toolCallIndex, toolCall);
+
+          results.push({
+            id: `chatcmpl-${state.messageId}`,
+            object: "chat.completion.chunk",
+            created: Math.floor(Date.now() / 1000),
+            model: state.model,
+            choices: [{
+              index: 0,
+              delta: { tool_calls: [toolCall] },
+              finish_reason: null
+            }]
+          });
+        }
+        continue;
+      }
+
+      // Text content (non-thinking) - skip empty text
+      if (part.text !== undefined && part.text !== "") {
+        results.push({
+          id: `chatcmpl-${state.messageId}`,
+          object: "chat.completion.chunk",
+          created: Math.floor(Date.now() / 1000),
+          model: state.model,
+          choices: [{
+            index: 0,
+            delta: { content: part.text },
+            finish_reason: null
+          }]
+        });
+      }
+
+      // Function call
+      if (part.functionCall) {
+        const fcName = part.functionCall.name;
+        const fcArgs = part.functionCall.args || {};
+        const toolCallIndex = state.functionIndex++;
+
+        const toolCall = {
+          id: `${fcName}-${Date.now()}-${toolCallIndex}`,
+          index: toolCallIndex,
+          type: "function",
+          function: {
+            name: fcName,
+            arguments: JSON.stringify(fcArgs)
+          }
+        };
+
+        state.toolCalls.set(toolCallIndex, toolCall);
+
+        results.push({
+          id: `chatcmpl-${state.messageId}`,
+          object: "chat.completion.chunk",
+          created: Math.floor(Date.now() / 1000),
+          model: state.model,
+          choices: [{
+            index: 0,
+            delta: { tool_calls: [toolCall] },
+            finish_reason: null
+          }]
+        });
+      }
+
+      // Inline data (images)
+      const inlineData = part.inlineData || part.inline_data;
+      if (inlineData?.data) {
+        const mimeType = inlineData.mimeType || inlineData.mime_type || "image/png";
+        results.push({
+          id: `chatcmpl-${state.messageId}`,
+          object: "chat.completion.chunk",
+          created: Math.floor(Date.now() / 1000),
+          model: state.model,
+          choices: [{
+            index: 0,
+            delta: {
+              images: [{
+                type: "image_url",
+                image_url: { url: `data:${mimeType};base64,${inlineData.data}` }
+              }]
+            },
+            finish_reason: null
+          }]
+        });
+      }
+    }
+  }
+
+  // Finish reason
+  if (candidate.finishReason) {
+    let finishReason = candidate.finishReason.toLowerCase();
+    if (finishReason === "stop" && state.toolCalls.size > 0) {
+      finishReason = "tool_calls";
+    }
+
+    results.push({
+      id: `chatcmpl-${state.messageId}`,
+      object: "chat.completion.chunk",
+      created: Math.floor(Date.now() / 1000),
+      model: state.model,
+      choices: [{
+        index: 0,
+        delta: {},
+        finish_reason: finishReason
+      }]
+    });
+    state.finishReason = finishReason;
+  }
+
+  // Usage metadata
+  const usage = response.usageMetadata || chunk.usageMetadata;
+  if (usage) {
+    const promptTokens = (usage.promptTokenCount || 0) + (usage.thoughtsTokenCount || 0);
+    state.usage = {
+      prompt_tokens: promptTokens,
+      completion_tokens: usage.candidatesTokenCount || 0,
+      total_tokens: usage.totalTokenCount || 0
+    };
+    if (usage.thoughtsTokenCount > 0) {
+      state.usage.completion_tokens_details = {
+        reasoning_tokens: usage.thoughtsTokenCount
+      };
+    }
+  }
+
+  return results.length > 0 ? results : null;
+}
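As an illustrative sketch (not part of the package), here is how a caller might drive the chunk translator over one streamed Gemini chunk. The state shape is inferred from how the function uses it (state.toolCalls must already be a Map); in the package itself the function is only reachable through the translator registry, so direct access is assumed here purely for illustration.

```js
// Sketch only; values are hypothetical
const state = { toolCalls: new Map() };

const geminiChunk = {
  responseId: "resp-123",
  modelVersion: "gemini-2.5-pro",
  candidates: [{
    content: { parts: [{ text: "Hello" }] },
    finishReason: "STOP"
  }],
  usageMetadata: { promptTokenCount: 10, candidatesTokenCount: 2, totalTokenCount: 12 }
};

const chunks = geminiToOpenAIResponse(geminiChunk, state);
// Expected: three chat.completion.chunk objects — a role delta, { content: "Hello" },
// and a final chunk with finish_reason "stop". Token counts land on state.usage
// ({ prompt_tokens: 10, completion_tokens: 2, total_tokens: 12 }) rather than in the chunks.
```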
+
+// ============================================
+// REGISTER ALL TRANSLATORS
+// ============================================
+
+// Request: OpenAI -> Gemini variants
+register(FORMATS.OPENAI, FORMATS.GEMINI, openaiToGemini, null);
+register(FORMATS.OPENAI, FORMATS.GEMINI_CLI, (model, body, stream, credentials) => wrapInCloudCodeEnvelope(model, openaiToGeminiCLI(model, body, stream), credentials), null);
+register(FORMATS.OPENAI, FORMATS.ANTIGRAVITY, openaiToAntigravity, null);
+
+// Response: Gemini variants -> OpenAI (all use same handler)
+register(FORMATS.GEMINI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
+register(FORMATS.GEMINI_CLI, FORMATS.OPENAI, null, geminiToOpenAIResponse);
+register(FORMATS.ANTIGRAVITY, FORMATS.OPENAI, null, geminiToOpenAIResponse);
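The module registers each direction with either a request translator or a response translator (the other slot is null). The registry itself lives in package/translator/index.js, which is not included in this hunk, so the following is only a hypothetical sketch of the pattern these calls imply.

```js
// Hypothetical registry sketch (not the package's actual index.js)
const translators = new Map();

function register(from, to, requestFn, responseFn) {
  translators.set(`${from}->${to}`, { requestFn, responseFn });
}

// A caller would then pair the two directions for one upstream hop, e.g. translate
// the request with OPENAI -> ANTIGRAVITY and each streamed response chunk back
// with ANTIGRAVITY -> OPENAI.
```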