agentic-flow 1.9.2 → 1.9.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +86 -0
- package/README.md +104 -0
- package/dist/cli-proxy.js +38 -6
- package/dist/core/long-running-agent.js +219 -0
- package/dist/core/provider-manager.js +434 -0
- package/dist/examples/use-provider-fallback.js +176 -0
- package/dist/proxy/anthropic-to-gemini.js +50 -15
- package/dist/proxy/proxy/anthropic-to-gemini.js +439 -0
- package/dist/proxy/utils/logger.js +59 -0
- package/docs/LANDING-PAGE-PROVIDER-CONTENT.md +204 -0
- package/docs/PROVIDER-FALLBACK-GUIDE.md +619 -0
- package/docs/PROVIDER-FALLBACK-SUMMARY.md +418 -0
- package/package.json +1 -1
- package/validation/test-provider-fallback.ts +285 -0
- package/wasm/reasoningbank/reasoningbank_wasm_bg.js +2 -2
- package/wasm/reasoningbank/reasoningbank_wasm_bg.wasm +0 -0
|
@@ -49,7 +49,9 @@ export class AnthropicToGeminiProxy {
|
|
|
49
49
|
});
|
|
50
50
|
// Determine endpoint based on streaming
|
|
51
51
|
const endpoint = anthropicReq.stream ? 'streamGenerateContent' : 'generateContent';
|
|
52
|
-
|
|
52
|
+
// BUG FIX: Add &alt=sse for streaming to get Server-Sent Events format
|
|
53
|
+
const streamParam = anthropicReq.stream ? '&alt=sse' : '';
|
|
54
|
+
const url = `${this.geminiBaseUrl}/models/${this.defaultModel}:${endpoint}?key=${this.geminiApiKey}${streamParam}`;
|
|
53
55
|
// Forward to Gemini
|
|
54
56
|
const response = await fetch(url, {
|
|
55
57
|
method: 'POST',
|
|
@@ -79,23 +81,39 @@ export class AnthropicToGeminiProxy {
|
|
|
79
81
|
throw new Error('No response body');
|
|
80
82
|
}
|
|
81
83
|
const decoder = new TextDecoder();
|
|
84
|
+
let chunkCount = 0;
|
|
82
85
|
while (true) {
|
|
83
86
|
const { done, value } = await reader.read();
|
|
84
87
|
if (done)
|
|
85
88
|
break;
|
|
86
89
|
const chunk = decoder.decode(value);
|
|
90
|
+
chunkCount++;
|
|
91
|
+
logger.info('Gemini stream chunk received', { chunkCount, chunkLength: chunk.length, chunkPreview: chunk.substring(0, 200) });
|
|
87
92
|
const anthropicChunk = this.convertGeminiStreamToAnthropic(chunk);
|
|
93
|
+
logger.info('Anthropic stream chunk generated', { chunkCount, anthropicLength: anthropicChunk.length, anthropicPreview: anthropicChunk.substring(0, 200) });
|
|
88
94
|
res.write(anthropicChunk);
|
|
89
95
|
}
|
|
96
|
+
logger.info('Gemini stream complete', { totalChunks: chunkCount });
|
|
90
97
|
res.end();
|
|
91
98
|
}
|
|
92
99
|
else {
|
|
93
100
|
// Non-streaming response
|
|
94
101
|
const geminiRes = await response.json();
|
|
102
|
+
// DEBUG: Log raw Gemini response
|
|
103
|
+
logger.info('Raw Gemini API response', {
|
|
104
|
+
hasResponse: !!geminiRes,
|
|
105
|
+
hasCandidates: !!geminiRes.candidates,
|
|
106
|
+
candidatesLength: geminiRes.candidates?.length,
|
|
107
|
+
firstCandidate: geminiRes.candidates?.[0],
|
|
108
|
+
fullResponse: JSON.stringify(geminiRes).substring(0, 500)
|
|
109
|
+
});
|
|
95
110
|
const anthropicRes = this.convertGeminiToAnthropic(geminiRes);
|
|
96
111
|
logger.info('Gemini proxy response sent', {
|
|
97
112
|
model: this.defaultModel,
|
|
98
|
-
usage: anthropicRes.usage
|
|
113
|
+
usage: anthropicRes.usage,
|
|
114
|
+
contentBlocks: anthropicRes.content?.length,
|
|
115
|
+
hasText: anthropicRes.content?.some((c) => c.type === 'text'),
|
|
116
|
+
firstContent: anthropicRes.content?.[0]
|
|
99
117
|
});
|
|
100
118
|
res.json(anthropicRes);
|
|
101
119
|
}
|
|
@@ -284,21 +302,33 @@ The system will automatically execute these commands and provide results.
|
|
|
284
302
|
convertGeminiToAnthropic(geminiRes) {
|
|
285
303
|
const candidate = geminiRes.candidates?.[0];
|
|
286
304
|
if (!candidate) {
|
|
305
|
+
logger.error('No candidates in Gemini response', { geminiRes });
|
|
287
306
|
throw new Error('No candidates in Gemini response');
|
|
288
307
|
}
|
|
289
308
|
const content = candidate.content;
|
|
290
309
|
const parts = content?.parts || [];
|
|
310
|
+
logger.info('Converting Gemini to Anthropic', {
|
|
311
|
+
hasParts: !!parts,
|
|
312
|
+
partsCount: parts.length,
|
|
313
|
+
partTypes: parts.map((p) => Object.keys(p))
|
|
314
|
+
});
|
|
291
315
|
// Extract text and function calls
|
|
292
316
|
let rawText = '';
|
|
293
317
|
const functionCalls = [];
|
|
294
318
|
for (const part of parts) {
|
|
295
319
|
if (part.text) {
|
|
296
320
|
rawText += part.text;
|
|
321
|
+
logger.info('Found text in part', { textLength: part.text.length, textPreview: part.text.substring(0, 100) });
|
|
297
322
|
}
|
|
298
323
|
if (part.functionCall) {
|
|
299
324
|
functionCalls.push(part.functionCall);
|
|
300
325
|
}
|
|
301
326
|
}
|
|
327
|
+
logger.info('Extracted content from Gemini', {
|
|
328
|
+
rawTextLength: rawText.length,
|
|
329
|
+
functionCallsCount: functionCalls.length,
|
|
330
|
+
rawTextPreview: rawText.substring(0, 200)
|
|
331
|
+
});
|
|
302
332
|
// Parse structured commands from Gemini's text response
|
|
303
333
|
const { cleanText, toolUses } = this.parseStructuredCommands(rawText);
|
|
304
334
|
// Build content array with text and tool uses
|
|
@@ -345,27 +375,32 @@ The system will automatically execute these commands and provide results.
|
|
|
345
375
|
};
|
|
346
376
|
}
|
|
347
377
|
convertGeminiStreamToAnthropic(chunk) {
|
|
348
|
-
// Gemini streaming returns
|
|
378
|
+
// Gemini streaming returns Server-Sent Events format: "data: {json}"
|
|
349
379
|
const lines = chunk.split('\n').filter(line => line.trim());
|
|
350
380
|
const anthropicChunks = [];
|
|
351
381
|
for (const line of lines) {
|
|
352
382
|
try {
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
383
|
+
// Parse SSE format: "data: {json}"
|
|
384
|
+
if (line.startsWith('data: ')) {
|
|
385
|
+
const jsonStr = line.substring(6); // Remove "data: " prefix
|
|
386
|
+
const parsed = JSON.parse(jsonStr);
|
|
387
|
+
const candidate = parsed.candidates?.[0];
|
|
388
|
+
const text = candidate?.content?.parts?.[0]?.text;
|
|
389
|
+
if (text) {
|
|
390
|
+
anthropicChunks.push(`event: content_block_delta\ndata: ${JSON.stringify({
|
|
391
|
+
type: 'content_block_delta',
|
|
392
|
+
delta: { type: 'text_delta', text }
|
|
393
|
+
})}\n\n`);
|
|
394
|
+
}
|
|
395
|
+
// Check for finish
|
|
396
|
+
if (candidate?.finishReason) {
|
|
397
|
+
anthropicChunks.push('event: message_stop\ndata: {}\n\n');
|
|
398
|
+
}
|
|
365
399
|
}
|
|
366
400
|
}
|
|
367
401
|
catch (e) {
|
|
368
402
|
// Ignore parse errors
|
|
403
|
+
logger.debug('Failed to parse Gemini stream chunk', { line, error: e.message });
|
|
369
404
|
}
|
|
370
405
|
}
|
|
371
406
|
return anthropicChunks.join('');
|
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
// Anthropic to Gemini Proxy Server
|
|
2
|
+
// Converts Anthropic API format to Google Gemini format
|
|
3
|
+
import express from 'express';
|
|
4
|
+
import { logger } from '../utils/logger.js';
|
|
5
|
+
/**
 * Express proxy that exposes the Anthropic Messages API surface
 * (`POST /v1/messages`) and translates requests and responses to and
 * from the Google Gemini `generateContent` / `streamGenerateContent`
 * endpoints.
 *
 * Config: { geminiApiKey, geminiBaseUrl?, defaultModel? }
 */
export class AnthropicToGeminiProxy {
    constructor(config) {
        this.app = express();
        this.geminiApiKey = config.geminiApiKey;
        this.geminiBaseUrl = config.geminiBaseUrl || 'https://generativelanguage.googleapis.com/v1beta';
        this.defaultModel = config.defaultModel || 'gemini-2.0-flash-exp';
        this.setupMiddleware();
        this.setupRoutes();
    }
    /** Install JSON body parsing and per-request debug logging. */
    setupMiddleware() {
        // Parse JSON bodies (generous limit for large conversation payloads)
        this.app.use(express.json({ limit: '50mb' }));
        // Logging middleware
        this.app.use((req, res, next) => {
            logger.debug('Gemini proxy request', {
                method: req.method,
                path: req.path,
                headers: Object.keys(req.headers)
            });
            next();
        });
    }
    /** Register the health check, the /v1/messages handler, and a 404 fallback. */
    setupRoutes() {
        // Health check
        this.app.get('/health', (req, res) => {
            res.json({ status: 'ok', service: 'anthropic-to-gemini-proxy' });
        });
        // Anthropic Messages API → Gemini generateContent
        this.app.post('/v1/messages', async (req, res) => {
            try {
                const anthropicReq = req.body;
                // Convert Anthropic format to Gemini format
                const geminiReq = this.convertAnthropicToGemini(anthropicReq);
                logger.info('Converting Anthropic request to Gemini', {
                    anthropicModel: anthropicReq.model,
                    geminiModel: this.defaultModel,
                    messageCount: geminiReq.contents.length,
                    stream: anthropicReq.stream,
                    apiKeyPresent: !!this.geminiApiKey,
                    apiKeyPrefix: this.geminiApiKey?.substring(0, 10)
                });
                // Determine endpoint based on streaming
                const endpoint = anthropicReq.stream ? 'streamGenerateContent' : 'generateContent';
                // BUG FIX: Add &alt=sse for streaming so Gemini returns
                // Server-Sent Events ("data: {json}" lines). Without it the
                // API streams a JSON array, which convertGeminiStreamToAnthropic()
                // cannot parse (it only handles "data: " prefixed lines).
                // This mirrors the same fix already applied in
                // dist/proxy/anthropic-to-gemini.js.
                const streamParam = anthropicReq.stream ? '&alt=sse' : '';
                const url = `${this.geminiBaseUrl}/models/${this.defaultModel}:${endpoint}?key=${this.geminiApiKey}${streamParam}`;
                // Forward to Gemini
                const response = await fetch(url, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json'
                    },
                    body: JSON.stringify(geminiReq)
                });
                if (!response.ok) {
                    const error = await response.text();
                    logger.error('Gemini API error', { status: response.status, error });
                    return res.status(response.status).json({
                        error: {
                            type: 'api_error',
                            message: error
                        }
                    });
                }
                // Handle streaming vs non-streaming
                if (anthropicReq.stream) {
                    // Stream response: relay Gemini SSE chunks, re-encoded as
                    // Anthropic-style SSE events, as they arrive.
                    res.setHeader('Content-Type', 'text/event-stream');
                    res.setHeader('Cache-Control', 'no-cache');
                    res.setHeader('Connection', 'keep-alive');
                    const reader = response.body?.getReader();
                    if (!reader) {
                        throw new Error('No response body');
                    }
                    const decoder = new TextDecoder();
                    let chunkCount = 0;
                    while (true) {
                        const { done, value } = await reader.read();
                        if (done)
                            break;
                        const chunk = decoder.decode(value);
                        chunkCount++;
                        logger.info('Gemini stream chunk received', { chunkCount, chunkLength: chunk.length, chunkPreview: chunk.substring(0, 200) });
                        const anthropicChunk = this.convertGeminiStreamToAnthropic(chunk);
                        logger.info('Anthropic stream chunk generated', { chunkCount, anthropicLength: anthropicChunk.length, anthropicPreview: anthropicChunk.substring(0, 200) });
                        res.write(anthropicChunk);
                    }
                    logger.info('Gemini stream complete', { totalChunks: chunkCount });
                    res.end();
                }
                else {
                    // Non-streaming response
                    const geminiRes = await response.json();
                    // DEBUG: Log raw Gemini response
                    logger.info('Raw Gemini API response', {
                        hasResponse: !!geminiRes,
                        hasCandidates: !!geminiRes.candidates,
                        candidatesLength: geminiRes.candidates?.length,
                        firstCandidate: geminiRes.candidates?.[0],
                        fullResponse: JSON.stringify(geminiRes).substring(0, 500)
                    });
                    const anthropicRes = this.convertGeminiToAnthropic(geminiRes);
                    logger.info('Gemini proxy response sent', {
                        model: this.defaultModel,
                        usage: anthropicRes.usage,
                        contentBlocks: anthropicRes.content?.length,
                        hasText: anthropicRes.content?.some((c) => c.type === 'text'),
                        firstContent: anthropicRes.content?.[0]
                    });
                    res.json(anthropicRes);
                }
            }
            catch (error) {
                logger.error('Gemini proxy error', { error: error.message, stack: error.stack });
                res.status(500).json({
                    error: {
                        type: 'proxy_error',
                        message: error.message
                    }
                });
            }
        });
        // Fallback for other Anthropic API endpoints
        this.app.use((req, res) => {
            logger.warn('Unsupported endpoint', { path: req.path, method: req.method });
            res.status(404).json({
                error: {
                    type: 'not_found',
                    message: `Endpoint ${req.path} not supported by Gemini proxy`
                }
            });
        });
    }
    /**
     * Translate an Anthropic Messages request body into a Gemini
     * generateContent request body.
     *
     * - The Anthropic `system` prompt (Gemini has no system role) and a
     *   block of structured-command tool instructions are prepended to
     *   the first user message.
     * - `temperature` / `max_tokens` map to `generationConfig`.
     * - Anthropic `tools` map to Gemini `functionDeclarations`, with
     *   `$schema` / `additionalProperties` stripped recursively.
     */
    convertAnthropicToGemini(anthropicReq) {
        const contents = [];
        // Add system message as first user message if present
        // Gemini doesn't have a dedicated system role, so we prepend it to the first user message
        let systemPrefix = '';
        if (anthropicReq.system) {
            systemPrefix = `System: ${anthropicReq.system}\n\n`;
        }
        // Add tool instructions for Gemini to understand file operations
        // Since Gemini doesn't have native tool calling, we instruct it to use structured XML-like commands
        const toolInstructions = `
IMPORTANT: You have access to file system operations through structured commands. Use these exact formats:

<file_write path="filename.ext">
content here
</file_write>

<file_read path="filename.ext"/>

<bash_command>
command here
</bash_command>

When you need to create, edit, or read files, use these structured commands in your response.
The system will automatically execute these commands and provide results.

`;
        // Prepend tool instructions to system prompt
        if (systemPrefix) {
            systemPrefix = toolInstructions + systemPrefix;
        }
        else {
            systemPrefix = toolInstructions;
        }
        // Convert Anthropic messages to Gemini format
        for (let i = 0; i < anthropicReq.messages.length; i++) {
            const msg = anthropicReq.messages[i];
            let text;
            if (typeof msg.content === 'string') {
                text = msg.content;
            }
            else if (Array.isArray(msg.content)) {
                // Extract text from content blocks
                text = msg.content
                    .filter(block => block.type === 'text')
                    .map(block => block.text)
                    .join('\n');
            }
            else {
                text = '';
            }
            // Add system prefix to first user message
            if (i === 0 && msg.role === 'user' && systemPrefix) {
                text = systemPrefix + text;
            }
            contents.push({
                role: msg.role === 'assistant' ? 'model' : 'user',
                parts: [{ text }]
            });
        }
        const geminiReq = {
            contents
        };
        // Add generation config if temperature or max_tokens specified
        if (anthropicReq.temperature !== undefined || anthropicReq.max_tokens !== undefined) {
            geminiReq.generationConfig = {};
            if (anthropicReq.temperature !== undefined) {
                geminiReq.generationConfig.temperature = anthropicReq.temperature;
            }
            if (anthropicReq.max_tokens !== undefined) {
                geminiReq.generationConfig.maxOutputTokens = anthropicReq.max_tokens;
            }
        }
        // Convert MCP/Anthropic tools to Gemini tools format
        if (anthropicReq.tools && anthropicReq.tools.length > 0) {
            geminiReq.tools = [{
                    functionDeclarations: anthropicReq.tools.map(tool => {
                        // Clean schema: Remove $schema and additionalProperties fields that Gemini doesn't support
                        const cleanSchema = (schema) => {
                            if (!schema || typeof schema !== 'object')
                                return schema;
                            const { $schema, additionalProperties, ...rest } = schema;
                            const cleaned = { ...rest };
                            // Recursively clean nested objects
                            if (cleaned.properties) {
                                cleaned.properties = Object.fromEntries(Object.entries(cleaned.properties).map(([key, value]) => [
                                    key,
                                    cleanSchema(value)
                                ]));
                            }
                            // Clean items if present
                            if (cleaned.items) {
                                cleaned.items = cleanSchema(cleaned.items);
                            }
                            return cleaned;
                        };
                        return {
                            name: tool.name,
                            description: tool.description || '',
                            parameters: cleanSchema(tool.input_schema) || {
                                type: 'object',
                                properties: {},
                                required: []
                            }
                        };
                    })
                }];
            logger.info('Forwarding MCP tools to Gemini', {
                toolCount: anthropicReq.tools.length,
                toolNames: anthropicReq.tools.map(t => t.name)
            });
        }
        return geminiReq;
    }
    /**
     * Scan Gemini's plain-text reply for the structured pseudo-XML
     * commands the tool instructions asked for (<file_write>,
     * <file_read/>, <bash_command>) and convert each into an Anthropic
     * tool_use block, replacing it in the text with a short placeholder.
     * Returns { cleanText, toolUses }.
     */
    parseStructuredCommands(text) {
        const toolUses = [];
        let cleanText = text;
        // Parse file_write commands
        const fileWriteRegex = /<file_write path="([^"]+)">([\s\S]*?)<\/file_write>/g;
        let match;
        while ((match = fileWriteRegex.exec(text)) !== null) {
            toolUses.push({
                type: 'tool_use',
                id: `tool_${Date.now()}_${toolUses.length}`,
                name: 'Write',
                input: {
                    file_path: match[1],
                    content: match[2].trim()
                }
            });
            cleanText = cleanText.replace(match[0], `[File written: ${match[1]}]`);
        }
        // Parse file_read commands
        const fileReadRegex = /<file_read path="([^"]+)"\/>/g;
        while ((match = fileReadRegex.exec(text)) !== null) {
            toolUses.push({
                type: 'tool_use',
                id: `tool_${Date.now()}_${toolUses.length}`,
                name: 'Read',
                input: {
                    file_path: match[1]
                }
            });
            cleanText = cleanText.replace(match[0], `[Reading file: ${match[1]}]`);
        }
        // Parse bash commands
        const bashRegex = /<bash_command>([\s\S]*?)<\/bash_command>/g;
        while ((match = bashRegex.exec(text)) !== null) {
            toolUses.push({
                type: 'tool_use',
                id: `tool_${Date.now()}_${toolUses.length}`,
                name: 'Bash',
                input: {
                    command: match[1].trim()
                }
            });
            cleanText = cleanText.replace(match[0], `[Executing: ${match[1].trim()}]`);
        }
        return { cleanText: cleanText.trim(), toolUses };
    }
    /**
     * Translate a non-streaming Gemini response into an Anthropic
     * Messages response: extracts text and native functionCall parts,
     * parses structured commands out of the text, and maps
     * finishReason / usageMetadata. Throws if Gemini returned no
     * candidates.
     */
    convertGeminiToAnthropic(geminiRes) {
        const candidate = geminiRes.candidates?.[0];
        if (!candidate) {
            logger.error('No candidates in Gemini response', { geminiRes });
            throw new Error('No candidates in Gemini response');
        }
        const content = candidate.content;
        const parts = content?.parts || [];
        logger.info('Converting Gemini to Anthropic', {
            hasParts: !!parts,
            partsCount: parts.length,
            partTypes: parts.map((p) => Object.keys(p))
        });
        // Extract text and function calls
        let rawText = '';
        const functionCalls = [];
        for (const part of parts) {
            if (part.text) {
                rawText += part.text;
                logger.info('Found text in part', { textLength: part.text.length, textPreview: part.text.substring(0, 100) });
            }
            if (part.functionCall) {
                functionCalls.push(part.functionCall);
            }
        }
        logger.info('Extracted content from Gemini', {
            rawTextLength: rawText.length,
            functionCallsCount: functionCalls.length,
            rawTextPreview: rawText.substring(0, 200)
        });
        // Parse structured commands from Gemini's text response
        const { cleanText, toolUses } = this.parseStructuredCommands(rawText);
        // Build content array with text and tool uses
        const contentBlocks = [];
        if (cleanText) {
            contentBlocks.push({
                type: 'text',
                text: cleanText
            });
        }
        // Add tool uses from structured commands
        contentBlocks.push(...toolUses);
        // Add tool uses from Gemini function calls (MCP tools)
        if (functionCalls.length > 0) {
            for (const functionCall of functionCalls) {
                contentBlocks.push({
                    type: 'tool_use',
                    id: `tool_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
                    name: functionCall.name,
                    input: functionCall.args || {}
                });
            }
            logger.info('Converted Gemini function calls to Anthropic format', {
                functionCallCount: functionCalls.length,
                functionNames: functionCalls.map((fc) => fc.name)
            });
        }
        return {
            id: `msg_${Date.now()}`,
            type: 'message',
            role: 'assistant',
            model: this.defaultModel,
            content: contentBlocks.length > 0 ? contentBlocks : [
                {
                    type: 'text',
                    text: rawText
                }
            ],
            stop_reason: this.mapFinishReason(candidate.finishReason),
            usage: {
                input_tokens: geminiRes.usageMetadata?.promptTokenCount || 0,
                output_tokens: geminiRes.usageMetadata?.candidatesTokenCount || 0
            }
        };
    }
    /**
     * Re-encode one Gemini SSE chunk as Anthropic-style SSE events
     * (content_block_delta per text delta, message_stop on finish).
     * Unparseable lines are logged at debug level and skipped.
     */
    convertGeminiStreamToAnthropic(chunk) {
        // Gemini streaming returns Server-Sent Events format: "data: {json}"
        const lines = chunk.split('\n').filter(line => line.trim());
        const anthropicChunks = [];
        for (const line of lines) {
            try {
                // Parse SSE format: "data: {json}"
                if (line.startsWith('data: ')) {
                    const jsonStr = line.substring(6); // Remove "data: " prefix
                    const parsed = JSON.parse(jsonStr);
                    const candidate = parsed.candidates?.[0];
                    const text = candidate?.content?.parts?.[0]?.text;
                    if (text) {
                        anthropicChunks.push(`event: content_block_delta\ndata: ${JSON.stringify({
                            type: 'content_block_delta',
                            delta: { type: 'text_delta', text }
                        })}\n\n`);
                    }
                    // Check for finish
                    if (candidate?.finishReason) {
                        anthropicChunks.push('event: message_stop\ndata: {}\n\n');
                    }
                }
            }
            catch (e) {
                // Ignore parse errors
                logger.debug('Failed to parse Gemini stream chunk', { line, error: e.message });
            }
        }
        return anthropicChunks.join('');
    }
    /** Map a Gemini finishReason to the closest Anthropic stop_reason. */
    mapFinishReason(reason) {
        const mapping = {
            'STOP': 'end_turn',
            'MAX_TOKENS': 'max_tokens',
            'SAFETY': 'stop_sequence',
            'RECITATION': 'stop_sequence',
            'OTHER': 'end_turn'
        };
        return mapping[reason || 'STOP'] || 'end_turn';
    }
    /** Start the HTTP server on `port` and announce the endpoint. */
    start(port) {
        this.app.listen(port, () => {
            logger.info('Anthropic to Gemini proxy started', {
                port,
                geminiBaseUrl: this.geminiBaseUrl,
                defaultModel: this.defaultModel
            });
            console.log(`\n✅ Gemini Proxy running at http://localhost:${port}`);
            console.log(`   Gemini Base URL: ${this.geminiBaseUrl}`);
            console.log(`   Default Model: ${this.defaultModel}\n`);
        });
    }
}
|
|
425
|
+
// CLI entry point
|
|
426
|
+
if (import.meta.url === `file://${process.argv[1]}`) {
|
|
427
|
+
const port = parseInt(process.env.PORT || '3001');
|
|
428
|
+
const geminiApiKey = process.env.GOOGLE_GEMINI_API_KEY;
|
|
429
|
+
if (!geminiApiKey) {
|
|
430
|
+
console.error('❌ Error: GOOGLE_GEMINI_API_KEY environment variable required');
|
|
431
|
+
process.exit(1);
|
|
432
|
+
}
|
|
433
|
+
const proxy = new AnthropicToGeminiProxy({
|
|
434
|
+
geminiApiKey,
|
|
435
|
+
geminiBaseUrl: process.env.GEMINI_BASE_URL,
|
|
436
|
+
defaultModel: process.env.COMPLETION_MODEL || process.env.REASONING_MODEL
|
|
437
|
+
});
|
|
438
|
+
proxy.start(port);
|
|
439
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Minimal leveled logger with sticky context.
 *
 * Output rules (driven by environment variables):
 * - QUIET=true silences every level except `error`.
 * - debug/info/warn are emitted only when DEBUG or VERBOSE is set;
 *   `error` always prints.
 * - NODE_ENV=production emits one JSON object per line; otherwise a
 *   human-readable `[timestamp] LEVEL: message {fields}` line.
 */
class Logger {
    constructor() {
        this.context = {};
    }
    /** Merge extra fields into the context attached to every log line. */
    setContext(ctx) {
        this.context = { ...this.context, ...ctx };
    }
    /** Core emitter shared by all level methods. */
    log(level, message, data) {
        // QUIET mode suppresses everything except errors.
        if (process.env.QUIET === 'true' && level !== 'error') {
            return;
        }
        const timestamp = new Date().toISOString();
        const fields = { ...this.context, ...data };
        if (process.env.NODE_ENV === 'production') {
            // Machine-readable JSON lines for production log aggregation.
            console.log(JSON.stringify({ timestamp, level, message, ...fields }));
            return;
        }
        // Human-readable line for development.
        const suffix = Object.keys(fields).length > 0 ? ` ${JSON.stringify(fields)}` : '';
        console.log(`[${timestamp}] ${level.toUpperCase()}: ${message}${suffix}`);
    }
    /** Debug-level log; emitted only when DEBUG or VERBOSE is set. */
    debug(message, data) {
        if (!process.env.DEBUG && !process.env.VERBOSE) {
            return;
        }
        this.log('debug', message, data);
    }
    /** Info-level log; emitted only when DEBUG or VERBOSE is set. */
    info(message, data) {
        if (!process.env.DEBUG && !process.env.VERBOSE) {
            return;
        }
        this.log('info', message, data);
    }
    /** Warning-level log; emitted only when DEBUG or VERBOSE is set. */
    warn(message, data) {
        if (!process.env.DEBUG && !process.env.VERBOSE) {
            return;
        }
        this.log('warn', message, data);
    }
    /** Error-level log; always emitted (even in QUIET mode). */
    error(message, data) {
        this.log('error', message, data);
    }
}
// Shared singleton used throughout the proxy modules.
export const logger = new Logger();
|