@elvatis_com/openclaw-cli-bridge-elvatis 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,280 @@
1
+ /**
2
+ * proxy-server.ts
3
+ *
4
+ * Minimal OpenAI-compatible HTTP proxy server.
5
+ * Routes POST /v1/chat/completions to the appropriate CLI tool.
6
+ * Supports both streaming (SSE) and non-streaming responses.
7
+ *
8
+ * OpenClaw connects via the "vllm" provider with baseUrl pointing here.
9
+ */
10
+
11
import http from "node:http";
import { randomBytes, timingSafeEqual } from "node:crypto";
import { type ChatMessage, routeToCliRunner } from "./cli-runner.js";
14
+
15
/** Configuration for the local OpenAI-compatible proxy server. */
export interface ProxyServerOptions {
  port: number; // TCP port to bind on 127.0.0.1
  apiKey?: string; // if set, validates Authorization: Bearer <key>
  timeoutMs?: number; // per-request CLI timeout; defaults to 120_000 ms in handleRequest
  log: (msg: string) => void; // informational logger
  warn: (msg: string) => void; // warning/error logger
}
22
+
23
/**
 * Available CLI bridge models for GET /v1/models.
 *
 * Each id follows the "cli-<tool>/<model>" pattern — presumably the prefix
 * selects which CLI backend routeToCliRunner invokes; verify in cli-runner.
 * contextWindow/maxTokens are advertised capacities only; nothing in this
 * file enforces them.
 */
export const CLI_MODELS = [
  {
    id: "cli-gemini/gemini-2.5-pro",
    name: "Gemini 2.5 Pro (CLI)",
    contextWindow: 1_000_000,
    maxTokens: 8192,
  },
  {
    id: "cli-gemini/gemini-2.5-flash",
    name: "Gemini 2.5 Flash (CLI)",
    contextWindow: 1_000_000,
    maxTokens: 8192,
  },
  {
    id: "cli-gemini/gemini-3-pro",
    name: "Gemini 3 Pro (CLI)",
    contextWindow: 1_000_000,
    maxTokens: 8192,
  },
  {
    id: "cli-claude/claude-opus-4-6",
    name: "Claude Opus 4.6 (CLI)",
    contextWindow: 200_000,
    maxTokens: 8192,
  },
  {
    id: "cli-claude/claude-sonnet-4-6",
    name: "Claude Sonnet 4.6 (CLI)",
    contextWindow: 200_000,
    maxTokens: 8192,
  },
  {
    id: "cli-claude/claude-haiku-4-5",
    name: "Claude Haiku 4.5 (CLI)",
    contextWindow: 200_000,
    maxTokens: 8192,
  },
];
62
+
63
+ // ──────────────────────────────────────────────────────────────────────────────
64
+ // Server
65
+ // ──────────────────────────────────────────────────────────────────────────────
66
+
67
+ export function startProxyServer(opts: ProxyServerOptions): Promise<http.Server> {
68
+ return new Promise((resolve, reject) => {
69
+ const server = http.createServer((req, res) => {
70
+ handleRequest(req, res, opts).catch((err: Error) => {
71
+ opts.warn(`[cli-bridge] Unhandled request error: ${err.message}`);
72
+ if (!res.headersSent) {
73
+ res.writeHead(500, { "Content-Type": "application/json" });
74
+ res.end(JSON.stringify({ error: { message: err.message, type: "internal_error" } }));
75
+ }
76
+ });
77
+ });
78
+
79
+ server.on("error", (err) => reject(err));
80
+ server.listen(opts.port, "127.0.0.1", () => {
81
+ opts.log(
82
+ `[cli-bridge] proxy server listening on http://127.0.0.1:${opts.port}`
83
+ );
84
+ resolve(server);
85
+ });
86
+ });
87
+ }
88
+
89
+ // ──────────────────────────────────────────────────────────────────────────────
90
+ // Request router
91
+ // ──────────────────────────────────────────────────────────────────────────────
92
+
93
+ async function handleRequest(
94
+ req: http.IncomingMessage,
95
+ res: http.ServerResponse,
96
+ opts: ProxyServerOptions
97
+ ): Promise<void> {
98
+ // CORS preflight
99
+ if (req.method === "OPTIONS") {
100
+ res.writeHead(204, corsHeaders());
101
+ res.end();
102
+ return;
103
+ }
104
+
105
+ const url = req.url ?? "/";
106
+
107
+ // Health check
108
+ if (url === "/health" || url === "/v1/health") {
109
+ res.writeHead(200, { "Content-Type": "application/json" });
110
+ res.end(JSON.stringify({ status: "ok", service: "openclaw-cli-bridge" }));
111
+ return;
112
+ }
113
+
114
+ // Model list
115
+ if (url === "/v1/models" && req.method === "GET") {
116
+ const now = Math.floor(Date.now() / 1000);
117
+ res.writeHead(200, { "Content-Type": "application/json", ...corsHeaders() });
118
+ res.end(
119
+ JSON.stringify({
120
+ object: "list",
121
+ data: CLI_MODELS.map((m) => ({
122
+ id: m.id,
123
+ object: "model",
124
+ created: now,
125
+ owned_by: "openclaw-cli-bridge",
126
+ })),
127
+ })
128
+ );
129
+ return;
130
+ }
131
+
132
+ // Chat completions
133
+ if (url === "/v1/chat/completions" && req.method === "POST") {
134
+ // Auth check
135
+ if (opts.apiKey) {
136
+ const auth = req.headers.authorization ?? "";
137
+ const token = auth.startsWith("Bearer ") ? auth.slice(7) : "";
138
+ if (token !== opts.apiKey) {
139
+ res.writeHead(401, { "Content-Type": "application/json" });
140
+ res.end(JSON.stringify({ error: { message: "Unauthorized", type: "auth_error" } }));
141
+ return;
142
+ }
143
+ }
144
+
145
+ const body = await readBody(req);
146
+ let parsed: {
147
+ model: string;
148
+ messages: ChatMessage[];
149
+ stream?: boolean;
150
+ };
151
+
152
+ try {
153
+ parsed = JSON.parse(body) as typeof parsed;
154
+ } catch {
155
+ res.writeHead(400, { "Content-Type": "application/json" });
156
+ res.end(JSON.stringify({ error: { message: "Invalid JSON body", type: "invalid_request_error" } }));
157
+ return;
158
+ }
159
+
160
+ const { model, messages, stream = false } = parsed;
161
+
162
+ if (!model || !messages?.length) {
163
+ res.writeHead(400, { "Content-Type": "application/json" });
164
+ res.end(JSON.stringify({ error: { message: "model and messages are required", type: "invalid_request_error" } }));
165
+ return;
166
+ }
167
+
168
+ opts.log(`[cli-bridge] ${model} · ${messages.length} msg(s) · stream=${stream}`);
169
+
170
+ let content: string;
171
+ try {
172
+ content = await routeToCliRunner(model, messages, opts.timeoutMs ?? 120_000);
173
+ } catch (err) {
174
+ const msg = (err as Error).message;
175
+ opts.warn(`[cli-bridge] CLI error for ${model}: ${msg}`);
176
+ res.writeHead(500, { "Content-Type": "application/json" });
177
+ res.end(JSON.stringify({ error: { message: msg, type: "cli_error" } }));
178
+ return;
179
+ }
180
+
181
+ const id = `chatcmpl-cli-${randomBytes(6).toString("hex")}`;
182
+ const created = Math.floor(Date.now() / 1000);
183
+
184
+ if (stream) {
185
+ res.writeHead(200, {
186
+ "Content-Type": "text/event-stream",
187
+ "Cache-Control": "no-cache",
188
+ Connection: "keep-alive",
189
+ ...corsHeaders(),
190
+ });
191
+
192
+ // Role chunk
193
+ sendSseChunk(res, { id, created, model, delta: { role: "assistant" }, finish_reason: null });
194
+
195
+ // Content in chunks (~50 chars each for natural feel)
196
+ const chunkSize = 50;
197
+ for (let i = 0; i < content.length; i += chunkSize) {
198
+ sendSseChunk(res, {
199
+ id,
200
+ created,
201
+ model,
202
+ delta: { content: content.slice(i, i + chunkSize) },
203
+ finish_reason: null,
204
+ });
205
+ }
206
+
207
+ // Stop chunk
208
+ sendSseChunk(res, { id, created, model, delta: {}, finish_reason: "stop" });
209
+ res.write("data: [DONE]\n\n");
210
+ res.end();
211
+ } else {
212
+ const response = {
213
+ id,
214
+ object: "chat.completion",
215
+ created,
216
+ model,
217
+ choices: [
218
+ {
219
+ index: 0,
220
+ message: { role: "assistant", content },
221
+ finish_reason: "stop",
222
+ },
223
+ ],
224
+ usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
225
+ };
226
+
227
+ res.writeHead(200, { "Content-Type": "application/json", ...corsHeaders() });
228
+ res.end(JSON.stringify(response));
229
+ }
230
+
231
+ return;
232
+ }
233
+
234
+ // 404
235
+ res.writeHead(404, { "Content-Type": "application/json" });
236
+ res.end(JSON.stringify({ error: { message: `Not found: ${url}`, type: "not_found" } }));
237
+ }
238
+
239
+ // ──────────────────────────────────────────────────────────────────────────────
240
+ // Helpers
241
+ // ──────────────────────────────────────────────────────────────────────────────
242
+
243
+ function sendSseChunk(
244
+ res: http.ServerResponse,
245
+ params: {
246
+ id: string;
247
+ created: number;
248
+ model: string;
249
+ delta: Record<string, unknown>;
250
+ finish_reason: string | null;
251
+ }
252
+ ): void {
253
+ const chunk = {
254
+ id: params.id,
255
+ object: "chat.completion.chunk",
256
+ created: params.created,
257
+ model: params.model,
258
+ choices: [
259
+ { index: 0, delta: params.delta, finish_reason: params.finish_reason },
260
+ ],
261
+ };
262
+ res.write(`data: ${JSON.stringify(chunk)}\n\n`);
263
+ }
264
+
265
+ function readBody(req: http.IncomingMessage): Promise<string> {
266
+ return new Promise((resolve, reject) => {
267
+ const chunks: Buffer[] = [];
268
+ req.on("data", (d: Buffer) => chunks.push(d));
269
+ req.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
270
+ req.on("error", reject);
271
+ });
272
+ }
273
+
274
+ function corsHeaders(): Record<string, string> {
275
+ return {
276
+ "Access-Control-Allow-Origin": "*",
277
+ "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
278
+ "Access-Control-Allow-Headers": "Content-Type, Authorization",
279
+ };
280
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "Node16",
5
+ "moduleResolution": "Node16",
6
+ "outDir": "dist",
7
+ "rootDir": ".",
8
+ "declaration": true,
9
+ "declarationMap": true,
10
+ "sourceMap": true,
11
+ "strict": true,
12
+ "esModuleInterop": true,
13
+ "skipLibCheck": true,
14
+ "forceConsistentCasingInFileNames": true,
15
+ "resolveJsonModule": true,
16
+ "isolatedModules": true
17
+ },
18
+ "include": ["index.ts", "src/**/*.ts"],
19
+ "exclude": ["node_modules", "dist", "test"]
20
+ }