codex-claude-proxy 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,331 @@
1
+ /**
2
+ * Format Converter
3
+ * Converts between Anthropic Messages API and OpenAI Responses API format
4
+ */
5
+
6
+ import crypto from 'crypto';
7
+
8
/**
 * Flatten an Anthropic `system` field into a single instructions string.
 *
 * Accepts either a plain string or an array of content blocks; only
 * `text` blocks are kept and are joined with blank lines.
 *
 * @param {string|Array<object>|undefined} system - Anthropic system prompt.
 * @returns {string|undefined} The combined prompt, or undefined when empty/absent.
 */
function extractSystemPrompt(system) {
  if (!system) {
    return undefined;
  }

  if (typeof system === 'string') {
    return system;
  }

  if (!Array.isArray(system)) {
    return undefined;
  }

  const joined = system
    .filter((block) => block.type === 'text')
    .map((block) => block.text)
    .join('\n\n');

  // An array with no text blocks collapses to undefined, not ''.
  return joined || undefined;
}
26
+
27
+ /**
28
+ * Convert Anthropic Messages API request to OpenAI Responses API format
29
+ */
30
/**
 * Convert an Anthropic Messages API request into an OpenAI Responses API
 * request body.
 *
 * Always requests streaming, disables server-side storage, and enables
 * parallel tool calls. Missing model/tool_choice fall back to defaults.
 *
 * @param {object} anthropicRequest - Incoming Anthropic-format request.
 * @returns {object} Responses-API request body.
 */
export function convertAnthropicToResponsesAPI(anthropicRequest) {
  const { model, messages, system, tools, tool_choice } = anthropicRequest;

  return {
    model: model || 'gpt-5.2-codex',
    input: convertMessagesToInput(messages),
    tools: tools ? convertAnthropicToolsToOpenAI(tools) : [],
    tool_choice: tool_choice || 'auto',
    parallel_tool_calls: true,
    store: false,
    stream: true,
    include: [],
    // extractSystemPrompt never returns '' — it yields a non-empty string
    // or undefined, so `|| ''` is equivalent to the explicit branch.
    instructions: extractSystemPrompt(system) || ''
  };
}
54
+
55
+ /**
56
+ * Convert Anthropic messages to OpenAI Responses API input format
57
+ */
58
/**
 * Convert an Anthropic `messages` array into Responses-API input items.
 *
 * Each user turn becomes a `message` item (text) followed by any
 * `function_call_output` items; each assistant turn becomes a `message`
 * item followed by its `function_call` items. Other roles are ignored.
 *
 * @param {Array<object>} messages - Anthropic conversation history.
 * @returns {Array<object>} Responses-API input list.
 */
function convertMessagesToInput(messages) {
  if (!Array.isArray(messages)) {
    return [];
  }

  const input = [];

  for (const msg of messages) {
    if (msg.role === 'user') {
      const { textParts, toolResults } = convertUserContent(msg.content);

      if (textParts.length > 0) {
        // The API accepts a bare string or an array of input_text parts.
        input.push({
          type: 'message',
          role: 'user',
          content: textParts.length === 1
            ? textParts[0]
            : textParts.map((text) => ({ type: 'input_text', text }))
        });
      }

      input.push(...toolResults);
    } else if (msg.role === 'assistant') {
      const { textParts, toolCalls } = convertAssistantContentToOpenAI(msg.content);

      if (textParts.length > 0) {
        // Assistant text uses output_text parts instead of input_text.
        input.push({
          type: 'message',
          role: 'assistant',
          content: textParts.length === 1
            ? textParts[0]
            : textParts.map((text) => ({ type: 'output_text', text }))
        });
      }

      input.push(...toolCalls);
    }
  }

  return input;
}
107
+
108
+ /**
109
+ * Convert user content, separating text and tool results
110
+ */
111
/**
 * Split Anthropic user-message content into plain text parts and
 * Responses-API `function_call_output` items.
 *
 * @param {string|Array<object>} content - Anthropic user content.
 * @returns {{textParts: string[], toolResults: object[]}}
 */
function convertUserContent(content) {
  const textParts = [];
  const toolResults = [];

  if (typeof content === 'string') {
    textParts.push(content);
  } else if (Array.isArray(content)) {
    for (const block of content) {
      if (block.type === 'text') {
        textParts.push(block.text);
      } else if (block.type === 'tool_result') {
        const outputContent = typeof block.content === 'string'
          ? block.content
          : Array.isArray(block.content)
            ? block.content.filter(c => c.type === 'text').map(c => c.text).join('\n')
            // JSON.stringify(undefined) is undefined, not a string — coerce to ''.
            : (JSON.stringify(block.content) ?? '');

        // The upstream API expects call ids with an `fc` prefix; rewrite
        // Anthropic/chat-style ids (`toolu_…`, `call_…`) accordingly.
        // Fix: the original also tested startsWith('fc_'), which is implied
        // by startsWith('fc') and was dead code; it also crashed on a
        // missing tool_use_id — guard with String(... || '').
        let callId = String(block.tool_use_id || '');
        if (!callId.startsWith('fc')) {
          callId = 'fc_' + callId.replace(/^(call_|toolu_)/, '');
        }

        toolResults.push({
          type: 'function_call_output',
          call_id: callId,
          output: block.is_error ? `Error: ${outputContent}` : outputContent
        });
      }
    }
  }

  return { textParts, toolResults };
}
144
+
145
+ /**
146
+ * Convert Anthropic assistant content to OpenAI format
147
+ */
148
/**
 * Split Anthropic assistant content into text parts and Responses-API
 * `function_call` items.
 *
 * @param {string|Array<object>} content - Anthropic assistant content.
 * @returns {{textParts: string[], toolCalls: object[]}}
 */
function convertAssistantContentToOpenAI(content) {
  const textParts = [];
  const toolCalls = [];

  if (typeof content === 'string') {
    textParts.push(content);
  } else if (Array.isArray(content)) {
    for (const block of content) {
      if (block.type === 'text') {
        textParts.push(block.text);
      } else if (block.type === 'tool_use') {
        // Fix: the original also tested startsWith('fc_'), which is implied
        // by startsWith('fc') and was dead code; it also crashed on a
        // missing block.id — guard with String(... || '').
        let callId = String(block.id || '');
        if (!callId.startsWith('fc')) {
          callId = 'fc_' + callId.replace(/^(call_|toolu_)/, '');
        }

        toolCalls.push({
          type: 'function_call',
          id: callId,
          call_id: callId,
          name: block.name,
          // JSON.stringify(undefined) is undefined — default absent input to {}.
          arguments: typeof block.input === 'string'
            ? block.input
            : JSON.stringify(block.input ?? {})
        });
      }
    }
  }

  return { textParts, toolCalls };
}
179
+
180
+ /**
181
+ * Convert Anthropic tools to OpenAI function format
182
+ */
183
/**
 * Map Anthropic tool declarations onto the Responses-API flat function
 * format, sanitizing each JSON schema on the way through.
 *
 * @param {Array<object>} tools - Anthropic tool list.
 * @returns {Array<object>} Responses-API tool list ([] for bad input).
 */
function convertAnthropicToolsToOpenAI(tools) {
  if (!Array.isArray(tools)) {
    return [];
  }

  return tools.map(({ name, description, input_schema }) => ({
    type: 'function',
    name,
    description: description || '',
    parameters: sanitizeSchema(input_schema || { type: 'object' })
  }));
}
195
+
196
/**
 * Reduce a JSON Schema to the subset the upstream API accepts.
 *
 * Drops validation keywords the API rejects, rewrites `const` as a
 * one-element enum, collapses union types to their first non-null member,
 * and recurses into `properties` / `items`. Always yields a schema with a
 * `type`, and object schemas always get a `properties` map.
 *
 * @param {object} schema - Arbitrary JSON Schema fragment.
 * @returns {object} Sanitized schema.
 */
function sanitizeSchema(schema) {
  if (schema === null || typeof schema !== 'object') {
    return { type: 'object' };
  }

  // Keywords stripped outright.
  const unsupported = new Set([
    '$schema', '$id', '$ref', '$defs', '$comment',
    'additionalItems', 'definitions', 'examples',
    'minLength', 'maxLength', 'pattern', 'format',
    'minItems', 'maxItems', 'minimum', 'maximum',
    'exclusiveMinimum', 'exclusiveMaximum',
    'allOf', 'anyOf', 'oneOf', 'not'
  ]);

  const sanitized = {};

  for (const [key, value] of Object.entries(schema)) {
    if (key === 'const') {
      // `const` becomes a single-value enum.
      sanitized.enum = [value];
    } else if (unsupported.has(key) || (key === 'additionalProperties' && typeof value === 'boolean')) {
      // dropped
    } else if (key === 'type' && Array.isArray(value)) {
      // Union type: first non-null member wins; all-null degrades to string.
      const concrete = value.filter((t) => t !== 'null');
      sanitized.type = concrete.length > 0 ? concrete[0] : 'string';
    } else if (key === 'properties' && value && typeof value === 'object') {
      sanitized.properties = Object.fromEntries(
        Object.entries(value).map(([prop, sub]) => [prop, sanitizeSchema(sub)])
      );
    } else if (key === 'items') {
      if (Array.isArray(value)) {
        sanitized.items = value.map((item) => sanitizeSchema(item));
      } else if (typeof value === 'object') {
        sanitized.items = sanitizeSchema(value);
      } else {
        sanitized.items = value;
      }
    } else if ((key === 'required' || key === 'enum') && Array.isArray(value)) {
      sanitized[key] = value;
    } else if (key === 'type' || key === 'description' || key === 'title') {
      sanitized[key] = value;
    }
  }

  if (!sanitized.type) {
    sanitized.type = 'object';
  }
  if (sanitized.type === 'object' && !sanitized.properties) {
    sanitized.properties = {};
  }

  return sanitized;
}
274
+
275
+ /**
276
+ * Convert OpenAI Responses API output to Anthropic content blocks
277
+ */
278
/**
 * Convert OpenAI Responses-API output items into Anthropic content blocks.
 *
 * `message` items yield text blocks (only output_text parts), `function_call`
 * items yield tool_use blocks, and `reasoning` items yield empty thinking
 * placeholders. Guarantees at least one (possibly empty) text block.
 *
 * @param {Array<object>} output - Responses-API output list.
 * @returns {Array<object>} Anthropic content blocks.
 */
export function convertOutputToAnthropic(output) {
  if (!Array.isArray(output)) {
    return [{ type: 'text', text: '' }];
  }

  // Unparseable argument strings degrade to an empty input object.
  const parseArgs = (raw) => {
    if (typeof raw !== 'string') {
      return raw || {};
    }
    try {
      return JSON.parse(raw);
    } catch (err) {
      return {};
    }
  };

  const blocks = [];

  for (const item of output) {
    switch (item.type) {
      case 'message':
        for (const part of item.content || []) {
          if (part.type === 'output_text') {
            blocks.push({ type: 'text', text: part.text });
          }
        }
        break;
      case 'function_call':
        blocks.push({
          type: 'tool_use',
          id: item.call_id || item.id,
          name: item.name,
          input: parseArgs(item.arguments)
        });
        break;
      case 'reasoning':
        // Reasoning content is not exposed; emit an empty thinking block.
        blocks.push({ type: 'thinking', thinking: '', signature: '' });
        break;
      default:
        break;
    }
  }

  return blocks.length > 0 ? blocks : [{ type: 'text', text: '' }];
}
319
+
320
+ /**
321
+ * Generate Anthropic message ID
322
+ */
323
/**
 * Generate an Anthropic-style message id: "msg_" plus 32 hex characters
 * from a cryptographically secure source.
 *
 * @returns {string} e.g. "msg_9f86d081884c7d65…"
 */
export function generateMessageId() {
  const suffix = crypto.randomBytes(16).toString('hex');
  return 'msg_' + suffix;
}
326
+
327
+ export default {
328
+ convertAnthropicToResponsesAPI,
329
+ convertOutputToAnthropic,
330
+ generateMessageId
331
+ };
package/src/index.js ADDED
@@ -0,0 +1,41 @@
1
/**
 * Codex Claude Proxy — entry point.
 *
 * Boots the HTTP server, prints a startup banner with the service URLs,
 * and logs the account-configuration status.
 */

import { startServer } from './server.js';
import { logger } from './utils/logger.js';
import { getStatus, ACCOUNTS_FILE } from './account-manager.js';

// Listening port; override with the PORT environment variable (default 8081).
const PORT = Number(process.env.PORT || 8081);

startServer({ port: PORT });

// NOTE(review): banner says v2.0.0 while the package itself is published as
// 1.0.0 — confirm which version string is current.
console.log(`
╔══════════════════════════════════════════════════════════════╗
║ Codex Claude Proxy v2.0.0 ║
║ (Direct API Mode) ║
╠══════════════════════════════════════════════════════════════╣
║ Server: http://localhost:${PORT} ║
║ WebUI: http://localhost:${PORT} ║
║ Health: http://localhost:${PORT}/health ║
║ Accounts: http://localhost:${PORT}/accounts ║
║ Logs: http://localhost:${PORT}/api/logs/stream ║
╠══════════════════════════════════════════════════════════════╣
║ Features: ║
║ ✓ Native tool calling support ║
║ ✓ Real-time streaming ║
║ ✓ Multi-account management ║
║ ✓ OpenAI & Anthropic API compatibility ║
╚══════════════════════════════════════════════════════════════╝
`);

// Report how many accounts are configured and which one is active.
const status = getStatus();
logger.info(`Accounts: ${status.total} total, Active: ${status.active || 'None'}`);

if (status.total === 0) {
  logger.warn(`No accounts configured. Open http://localhost:${PORT} to add one.`);
}

// Expose config path in logs for convenience
logger.info(`Accounts config: ${ACCOUNTS_FILE}`);
@@ -0,0 +1,68 @@
1
+ /**
2
+ * Kilo API client
3
+ */
4
+
5
+ import { convertAnthropicToOpenAIChat, convertOpenAIChatToAnthropic } from './kilo-format-converter.js';
6
+ import { streamOpenAIChat } from './kilo-streamer.js';
7
+
8
+ const KILO_API_URL = 'https://api.kilo.ai/api/openrouter/chat/completions';
9
+
10
+ const KILO_HEADERS = {
11
+ Authorization: 'Bearer anonymous',
12
+ 'User-Agent': 'opencode-kilo-provider',
13
+ 'HTTP-Referer': 'https://kilo.ai'
14
+ };
15
+
16
/**
 * Build an Error that carries an HTTP-style status code.
 *
 * @param {number} status - HTTP status to attach as `err.status`.
 * @param {string} message - Error message.
 * @returns {Error} Error with a `status` property.
 */
function buildError(status, message) {
  return Object.assign(new Error(message), { status });
}
21
+
22
/**
 * Send an Anthropic-format request to the Kilo endpoint and stream the
 * response back as Anthropic-style events.
 *
 * @param {object} anthropicRequest - Anthropic Messages API request.
 * @param {string} targetModel - Model id to send upstream.
 * @yields Anthropic streaming events (via streamOpenAIChat).
 * @throws {Error} With `status` set when the upstream responds non-2xx.
 */
export async function* sendKiloMessageStream(anthropicRequest, targetModel) {
  const body = JSON.stringify(convertAnthropicToOpenAIChat(anthropicRequest, targetModel));

  const headers = {
    ...KILO_HEADERS,
    'Content-Type': 'application/json',
    Accept: 'text/event-stream'
  };

  const response = await fetch(KILO_API_URL, { method: 'POST', headers, body });

  if (!response.ok) {
    const detail = await response.text();
    throw buildError(response.status, `KILO_API_ERROR: ${response.status} - ${detail}`);
  }

  yield* streamOpenAIChat(response, anthropicRequest.model);
}
42
+
43
/**
 * Send an Anthropic-format request to the Kilo endpoint without streaming
 * and return the Anthropic-shaped result.
 *
 * @param {object} anthropicRequest - Anthropic Messages API request.
 * @param {string} targetModel - Model id to send upstream.
 * @returns {Promise<object>} Converted Anthropic response.
 * @throws {Error} With `status` set when the upstream responds non-2xx.
 */
export async function sendKiloMessage(anthropicRequest, targetModel) {
  // stream:false is forced both in the converted request and the final body.
  const requestBody = convertAnthropicToOpenAIChat({ ...anthropicRequest, stream: false }, targetModel);

  const headers = {
    ...KILO_HEADERS,
    'Content-Type': 'application/json',
    Accept: 'application/json'
  };

  const response = await fetch(KILO_API_URL, {
    method: 'POST',
    headers,
    body: JSON.stringify({ ...requestBody, stream: false })
  });

  if (!response.ok) {
    const detail = await response.text();
    throw buildError(response.status, `KILO_API_ERROR: ${response.status} - ${detail}`);
  }

  const data = await response.json();
  return convertOpenAIChatToAnthropic(data);
}
64
+
65
+ export default {
66
+ sendKiloMessageStream,
67
+ sendKiloMessage
68
+ };
@@ -0,0 +1,270 @@
1
+ /**
2
+ * Kilo Format Converter
3
+ * Converts between Anthropic Messages API and OpenAI Chat Completions format
4
+ */
5
+
6
/**
 * Turn an Anthropic `system` field into a list of OpenAI chat system
 * messages (zero or one entries).
 *
 * @param {string|Array<object>|undefined} system - Anthropic system prompt.
 * @returns {Array<{role: 'system', content: string}>}
 */
function extractSystemPrompt(system) {
  if (!system) {
    return [];
  }

  if (typeof system === 'string') {
    return [{ role: 'system', content: system }];
  }

  if (!Array.isArray(system)) {
    return [];
  }

  const text = system
    .filter((block) => block.type === 'text')
    .map((block) => block.text)
    .join('\n\n');

  // No text blocks → no system message at all.
  return text ? [{ role: 'system', content: text }] : [];
}
18
+
19
/**
 * Reduce a JSON Schema to the subset the upstream API accepts.
 *
 * NOTE(review): this duplicates the sanitizer in the Responses-API format
 * converter — keep the two copies in sync.
 *
 * Drops validation keywords, rewrites `const` as a one-element enum,
 * collapses union types to their first non-null member, and recurses into
 * `properties` / `items`. Always yields a schema with a `type`; object
 * schemas always get a `properties` map.
 *
 * @param {object} schema - Arbitrary JSON Schema fragment.
 * @returns {object} Sanitized schema.
 */
function sanitizeSchema(schema) {
  if (schema === null || typeof schema !== 'object') {
    return { type: 'object' };
  }

  const unsupported = new Set([
    '$schema', '$id', '$ref', '$defs', '$comment',
    'additionalItems', 'definitions', 'examples',
    'minLength', 'maxLength', 'pattern', 'format',
    'minItems', 'maxItems', 'minimum', 'maximum',
    'exclusiveMinimum', 'exclusiveMaximum',
    'allOf', 'anyOf', 'oneOf', 'not'
  ]);

  const sanitized = {};

  for (const [key, value] of Object.entries(schema)) {
    if (key === 'const') {
      sanitized.enum = [value];
    } else if (unsupported.has(key) || (key === 'additionalProperties' && typeof value === 'boolean')) {
      // dropped
    } else if (key === 'type' && Array.isArray(value)) {
      const concrete = value.filter((t) => t !== 'null');
      sanitized.type = concrete.length > 0 ? concrete[0] : 'string';
    } else if (key === 'properties' && value && typeof value === 'object') {
      sanitized.properties = Object.fromEntries(
        Object.entries(value).map(([prop, sub]) => [prop, sanitizeSchema(sub)])
      );
    } else if (key === 'items') {
      if (Array.isArray(value)) {
        sanitized.items = value.map((item) => sanitizeSchema(item));
      } else if (typeof value === 'object') {
        sanitized.items = sanitizeSchema(value);
      } else {
        sanitized.items = value;
      }
    } else if ((key === 'required' || key === 'enum') && Array.isArray(value)) {
      sanitized[key] = value;
    } else if (key === 'type' || key === 'description' || key === 'title') {
      sanitized[key] = value;
    }
  }

  if (!sanitized.type) {
    sanitized.type = 'object';
  }
  if (sanitized.type === 'object' && !sanitized.properties) {
    sanitized.properties = {};
  }

  return sanitized;
}
97
+
98
/**
 * Map Anthropic tool declarations onto OpenAI chat-completions function
 * tools, sanitizing each input schema.
 *
 * @param {Array<object>} tools - Anthropic tool list.
 * @returns {Array<object>|undefined} Chat tools, or undefined for bad input.
 */
function convertTools(tools) {
  if (!Array.isArray(tools)) {
    return undefined;
  }

  return tools.map(({ name, description, input_schema }) => ({
    type: 'function',
    function: {
      name,
      description: description || '',
      parameters: sanitizeSchema(input_schema || { type: 'object' })
    }
  }));
}
109
+
110
/**
 * Convert an Anthropic `tool_choice` value into the OpenAI chat format.
 *
 * Anthropic sends an object form ({type:'auto'|'any'|'none'} or
 * {type:'tool', name}); OpenAI expects 'auto'|'required'|'none' or a
 * {type:'function', function:{name}} object. Strings pass through
 * unchanged (backward compatible with earlier callers).
 *
 * Fix: the original dropped {type:'auto'}, {type:'any'} and {type:'none'}
 * on the floor, so callers forcing tool use were silently ignored.
 *
 * @param {string|object|undefined} toolChoice - Anthropic tool_choice.
 * @returns {string|object|undefined} OpenAI tool_choice, or undefined.
 */
function convertToolChoice(toolChoice) {
  if (!toolChoice) return undefined;
  if (typeof toolChoice === 'string') return toolChoice;

  switch (toolChoice.type) {
    case 'tool':
      return toolChoice.name
        ? { type: 'function', function: { name: toolChoice.name } }
        : undefined;
    case 'any':
      // Anthropic "any" = model must call some tool = OpenAI "required".
      return 'required';
    case 'auto':
      return 'auto';
    case 'none':
      return 'none';
    default:
      return undefined;
  }
}
118
+
119
/**
 * Extract the plain-text strings from Anthropic message content.
 *
 * @param {string|Array<object>} content - String or content-block array.
 * @returns {string[]} Text of every `text` block (a string yields itself).
 */
function normalizeTextBlocks(content) {
  if (typeof content === 'string') {
    return [content];
  }

  if (!Array.isArray(content)) {
    return [];
  }

  const texts = [];
  for (const block of content) {
    if (block.type === 'text') {
      texts.push(block.text);
    }
  }
  return texts;
}
124
+
125
/**
 * Flatten a tool_result block's content into a single string.
 *
 * Strings pass through; block arrays join their text parts with newlines;
 * other objects are JSON-serialized; anything else becomes ''.
 *
 * @param {object} block - Anthropic tool_result block.
 * @returns {string}
 */
function normalizeToolResultContent(block) {
  const { content } = block;

  if (typeof content === 'string') {
    return content;
  }

  if (Array.isArray(content)) {
    return content
      .filter((part) => part.type === 'text')
      .map((part) => part.text)
      .join('\n');
  }

  return content && typeof content === 'object' ? JSON.stringify(content) : '';
}
135
+
136
/**
 * Convert Anthropic conversation messages into OpenAI chat messages.
 *
 * User turns emit a user message (joined text) followed by one `tool`
 * message per tool_result. Assistant turns emit a single assistant message
 * carrying joined text and any tool_calls; empty assistant turns are
 * skipped. Other roles are ignored.
 *
 * @param {Array<object>} [messages] - Anthropic messages array.
 * @returns {Array<object>} OpenAI chat messages.
 */
function convertMessages(messages = []) {
  const result = [];

  for (const msg of messages) {
    const blocks = Array.isArray(msg.content) ? msg.content : [];

    if (msg.role === 'user') {
      const texts = normalizeTextBlocks(msg.content);
      if (texts.length > 0) {
        result.push({ role: 'user', content: texts.join('\n\n') });
      }

      // Tool results follow the user text, preserving block order.
      for (const block of blocks) {
        if (block.type !== 'tool_result') continue;
        result.push({
          role: 'tool',
          tool_call_id: block.tool_use_id,
          content: normalizeToolResultContent(block)
        });
      }
    } else if (msg.role === 'assistant') {
      const texts = normalizeTextBlocks(msg.content);
      const calls = blocks
        .filter((block) => block.type === 'tool_use')
        .map((block) => ({
          id: block.id,
          type: 'function',
          function: {
            name: block.name,
            arguments: typeof block.input === 'string'
              ? block.input
              : JSON.stringify(block.input || {})
          }
        }));

      if (texts.length > 0 || calls.length > 0) {
        const entry = { role: 'assistant', content: texts.join('\n\n') };
        if (calls.length > 0) {
          entry.tool_calls = calls;
        }
        result.push(entry);
      }
    }
  }

  return result;
}
197
+
198
/**
 * Build an OpenAI chat-completions request from an Anthropic Messages API
 * request.
 *
 * Streaming defaults to true unless the request explicitly sets
 * stream:false. Sampling knobs are copied only when numeric; stop
 * sequences only when a non-empty array.
 *
 * @param {object} anthropicRequest - Anthropic-format request.
 * @param {string} targetModel - Upstream model id to use.
 * @returns {object} Chat-completions request body.
 */
export function convertAnthropicToOpenAIChat(anthropicRequest, targetModel) {
  const {
    system, messages, tools, tool_choice,
    max_tokens, temperature, top_p, stop_sequences, stream
  } = anthropicRequest;

  const request = {
    model: targetModel,
    messages: [
      ...extractSystemPrompt(system),
      ...convertMessages(messages || [])
    ],
    stream: stream !== false
  };

  if (typeof max_tokens === 'number') request.max_tokens = max_tokens;
  if (typeof temperature === 'number') request.temperature = temperature;
  if (typeof top_p === 'number') request.top_p = top_p;
  if (Array.isArray(stop_sequences) && stop_sequences.length > 0) {
    request.stop = stop_sequences;
  }

  const chatTools = convertTools(tools);
  if (chatTools?.length) request.tools = chatTools;

  const chatToolChoice = convertToolChoice(tool_choice);
  if (chatToolChoice) request.tool_choice = chatToolChoice;

  return request;
}
225
+
226
/**
 * Convert a non-streaming OpenAI chat-completions response into the
 * Anthropic-shaped result used by this proxy.
 *
 * Fix: finish_reason 'length' now maps to 'max_tokens' instead of being
 * reported as a clean 'end_turn', so truncation is visible to callers.
 *
 * NOTE(review): the result key is camelCase `stopReason`, not Anthropic's
 * wire-format `stop_reason` — callers in this repo appear to depend on it,
 * so it is kept as-is.
 *
 * @param {object} openAiResponse - Chat-completions response body.
 * @returns {{content: Array<object>, stopReason: string, usage: object}}
 */
export function convertOpenAIChatToAnthropic(openAiResponse) {
  const choice = openAiResponse?.choices?.[0];
  const message = choice?.message || {};
  const content = [];

  if (message.content) {
    content.push({ type: 'text', text: message.content });
  }

  if (Array.isArray(message.tool_calls)) {
    for (const call of message.tool_calls) {
      // Unparseable argument strings degrade to an empty input object.
      let input = {};
      const rawArgs = call.function?.arguments;
      if (typeof rawArgs === 'string') {
        try {
          input = JSON.parse(rawArgs);
        } catch (error) {
          input = {};
        }
      } else if (rawArgs) {
        input = rawArgs;
      }

      content.push({
        type: 'tool_use',
        id: call.id,
        name: call.function?.name || 'unknown',
        input
      });
    }
  }

  const finishReason = choice?.finish_reason;
  const stopReason = finishReason === 'tool_calls'
    ? 'tool_use'
    : finishReason === 'length'
      ? 'max_tokens'
      : 'end_turn';

  return {
    content: content.length > 0 ? content : [{ type: 'text', text: '' }],
    stopReason,
    usage: {
      input_tokens: openAiResponse?.usage?.prompt_tokens || 0,
      output_tokens: openAiResponse?.usage?.completion_tokens || 0
    }
  };
}
266
+
267
+ export default {
268
+ convertAnthropicToOpenAIChat,
269
+ convertOpenAIChatToAnthropic
270
+ };