@elvatis_com/openclaw-cli-bridge-elvatis 1.6.3 → 1.6.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -3
- package/SKILL.md +1 -1
- package/index.ts +13 -4
- package/openclaw.plugin.json +1 -1
- package/package.json +1 -1
- package/src/cli-runner.ts +17 -2
- package/src/proxy-server.ts +24 -2
package/README.md
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
> OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, health testing, and model listing.
|
|
4
4
|
|
|
5
|
-
**Current version:** `1.6.
|
|
5
|
+
**Current version:** `1.6.4`
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
@@ -362,9 +362,13 @@ npm test # vitest run (83 tests)
|
|
|
362
362
|
|
|
363
363
|
## Changelog
|
|
364
364
|
|
|
365
|
+
### v1.6.4
|
|
366
|
+
- **chore:** version bump (1.6.3 was already published on npm with partial changes)
|
|
367
|
+
|
|
365
368
|
### v1.6.3
|
|
366
|
-
- **fix:**
|
|
367
|
-
- **
|
|
369
|
+
- **fix:** CLI-proxy models (`cli-gemini/*`, `cli-claude/*`) now return HTTP 400 with `tools_not_supported` when a request includes tool/function call schemas — prevents agents from silently failing or hallucinating when assigned a CLI-proxy model
|
|
370
|
+
- **feat:** `/v1/models` response includes `capabilities.tools: false` for CLI-proxy models so OpenClaw can detect tool support upfront
|
|
371
|
+
- **fix:** EADDRINUSE on hot-reload: re-probe after 1.5s wait before retrying bind; probe timeout 800ms → 2000ms
|
|
368
372
|
|
|
369
373
|
### v1.6.2
|
|
370
374
|
- **docs:** Add missing changelog entries (v1.5.1, v1.6.0, v1.6.1), fix /cli-codex54 command name in SKILL.md, add startup re-login alert description to SKILL.md.
|
package/SKILL.md
CHANGED
package/index.ts
CHANGED
|
@@ -854,7 +854,7 @@ function proxyTestRequest(
|
|
|
854
854
|
const plugin = {
|
|
855
855
|
id: "openclaw-cli-bridge-elvatis",
|
|
856
856
|
name: "OpenClaw CLI Bridge",
|
|
857
|
-
version: "1.6.
|
|
857
|
+
version: "1.6.4",
|
|
858
858
|
description:
|
|
859
859
|
"Phase 1: openai-codex auth bridge. " +
|
|
860
860
|
"Phase 2: HTTP proxy for gemini/claude CLIs. " +
|
|
@@ -1067,7 +1067,7 @@ const plugin = {
|
|
|
1067
1067
|
headers: { Authorization: `Bearer ${apiKey}` } },
|
|
1068
1068
|
(res) => { res.resume(); resolve(res.statusCode === 200); }
|
|
1069
1069
|
);
|
|
1070
|
-
req.setTimeout(
|
|
1070
|
+
req.setTimeout(2000, () => { req.destroy(); resolve(false); });
|
|
1071
1071
|
req.on("error", () => resolve(false));
|
|
1072
1072
|
req.end();
|
|
1073
1073
|
});
|
|
@@ -1144,8 +1144,17 @@ const plugin = {
|
|
|
1144
1144
|
} catch (err: unknown) {
|
|
1145
1145
|
const msg = (err as Error).message ?? String(err);
|
|
1146
1146
|
if (msg.includes("EADDRINUSE")) {
|
|
1147
|
-
// Port is busy but probe didn't respond β
|
|
1148
|
-
|
|
1147
|
+
// Port is busy but probe didn't respond — maybe the old server is still shutting down.
|
|
1148
|
+
// Re-probe first: if it now responds, reuse it without rebinding.
|
|
1149
|
+
api.logger.warn(`[cli-bridge] port ${port} busy — re-probing before retry…`);
|
|
1150
|
+
await new Promise(r => setTimeout(r, 1500));
|
|
1151
|
+
const aliveNow = await probeExisting();
|
|
1152
|
+
if (aliveNow) {
|
|
1153
|
+
api.logger.info(`[cli-bridge] proxy now responding on :${port} — reusing`);
|
|
1154
|
+
return;
|
|
1155
|
+
}
|
|
1156
|
+
// Still not responding — wait for OS to release the port, then rebind
|
|
1157
|
+
api.logger.warn(`[cli-bridge] port ${port} still busy, waiting 1s for OS release…`);
|
|
1149
1158
|
await new Promise((r) => setTimeout(r, 1000));
|
|
1150
1159
|
// One final attempt
|
|
1151
1160
|
try {
|
package/openclaw.plugin.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"id": "openclaw-cli-bridge-elvatis",
|
|
3
3
|
"name": "OpenClaw CLI Bridge",
|
|
4
|
-
"version": "1.6.
|
|
4
|
+
"version": "1.6.4",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
|
|
7
7
|
"providers": [
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@elvatis_com/openclaw-cli-bridge-elvatis",
|
|
3
|
-
"version": "1.6.
|
|
3
|
+
"version": "1.6.4",
|
|
4
4
|
"description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"openclaw": {
|
package/src/cli-runner.ts
CHANGED
|
@@ -316,8 +316,20 @@ export const DEFAULT_ALLOWED_CLI_MODELS: ReadonlySet<string> = new Set([
|
|
|
316
316
|
"cli-gemini/gemini-2.5-flash",
|
|
317
317
|
"cli-gemini/gemini-3-pro-preview",
|
|
318
318
|
"cli-gemini/gemini-3-flash-preview",
|
|
319
|
+
// Aliases (map to preview variants internally)
|
|
320
|
+
"cli-gemini/gemini-3-pro", // alias → gemini-3-pro-preview
|
|
321
|
+
"cli-gemini/gemini-3-flash", // alias → gemini-3-flash-preview
|
|
319
322
|
]);
|
|
320
323
|
|
|
324
|
+
/** Normalize model aliases to their canonical CLI model names. */
|
|
325
|
+
function normalizeModelAlias(normalized: string): string {
|
|
326
|
+
const ALIASES: Record<string, string> = {
|
|
327
|
+
"cli-gemini/gemini-3-pro": "cli-gemini/gemini-3-pro-preview",
|
|
328
|
+
"cli-gemini/gemini-3-flash": "cli-gemini/gemini-3-flash-preview",
|
|
329
|
+
};
|
|
330
|
+
return ALIASES[normalized] ?? normalized;
|
|
331
|
+
}
|
|
332
|
+
|
|
321
333
|
// ──────────────────────────────────────────────────────────────────────────────
|
|
322
334
|
// Router
|
|
323
335
|
// ──────────────────────────────────────────────────────────────────────────────
|
|
@@ -364,8 +376,11 @@ export async function routeToCliRunner(
|
|
|
364
376
|
);
|
|
365
377
|
}
|
|
366
378
|
|
|
367
|
-
|
|
368
|
-
|
|
379
|
+
// Resolve aliases (e.g. gemini-3-pro → gemini-3-pro-preview) after allowlist check
|
|
380
|
+
const resolved = normalizeModelAlias(normalized);
|
|
381
|
+
|
|
382
|
+
if (resolved.startsWith("cli-gemini/")) return runGemini(prompt, resolved, timeoutMs);
|
|
383
|
+
if (resolved.startsWith("cli-claude/")) return runClaude(prompt, resolved, timeoutMs);
|
|
369
384
|
|
|
370
385
|
throw new Error(
|
|
371
386
|
`Unknown CLI bridge model: "${model}". Use "vllm/cli-gemini/<model>" or "vllm/cli-claude/<model>".`
|
package/src/proxy-server.ts
CHANGED
|
@@ -72,6 +72,7 @@ export const CLI_MODELS = [
|
|
|
72
72
|
{ id: "cli-gemini/gemini-2.5-pro", name: "Gemini 2.5 Pro (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
|
|
73
73
|
{ id: "cli-gemini/gemini-2.5-flash", name: "Gemini 2.5 Flash (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
|
|
74
74
|
{ id: "cli-gemini/gemini-3-pro-preview",name: "Gemini 3 Pro (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
|
|
75
|
+
{ id: "cli-gemini/gemini-3-pro", name: "Gemini 3 Pro (CLI, alias)", contextWindow: 1_000_000, maxTokens: 8_192 },
|
|
75
76
|
// Grok web-session models (requires /grok-login)
|
|
76
77
|
{ id: "web-grok/grok-3", name: "Grok 3 (web session)", contextWindow: 131_072, maxTokens: 131_072 },
|
|
77
78
|
{ id: "web-grok/grok-3-fast", name: "Grok 3 Fast (web session)", contextWindow: 131_072, maxTokens: 131_072 },
|
|
@@ -167,6 +168,10 @@ async function handleRequest(
|
|
|
167
168
|
object: "model",
|
|
168
169
|
created: now,
|
|
169
170
|
owned_by: "openclaw-cli-bridge",
|
|
171
|
+
// CLI-proxy models stream plain text — no tool/function call support
|
|
172
|
+
capabilities: {
|
|
173
|
+
tools: !(m.id.startsWith("cli-gemini/") || m.id.startsWith("cli-claude/")),
|
|
174
|
+
},
|
|
170
175
|
})),
|
|
171
176
|
})
|
|
172
177
|
);
|
|
@@ -201,7 +206,8 @@ async function handleRequest(
|
|
|
201
206
|
return;
|
|
202
207
|
}
|
|
203
208
|
|
|
204
|
-
const { model, messages, stream = false } = parsed;
|
|
209
|
+
const { model, messages, stream = false } = parsed as { model: string; messages: ChatMessage[]; stream?: boolean; tools?: unknown };
|
|
210
|
+
const hasTools = Array.isArray((parsed as { tools?: unknown }).tools) && (parsed as { tools?: unknown[] }).tools!.length > 0;
|
|
205
211
|
|
|
206
212
|
if (!model || !messages?.length) {
|
|
207
213
|
res.writeHead(400, { "Content-Type": "application/json" });
|
|
@@ -209,7 +215,23 @@ async function handleRequest(
|
|
|
209
215
|
return;
|
|
210
216
|
}
|
|
211
217
|
|
|
212
|
-
|
|
218
|
+
// CLI-proxy models (cli-gemini/*, cli-claude/*) are plain text completions —
|
|
219
|
+
// they cannot process tool/function call schemas. Return a clear 400 so
|
|
220
|
+
// OpenClaw can surface a meaningful error instead of getting a garbled response.
|
|
221
|
+
const isCliModel = model.startsWith("cli-gemini/") || model.startsWith("cli-claude/");
|
|
222
|
+
if (hasTools && isCliModel) {
|
|
223
|
+
res.writeHead(400, { "Content-Type": "application/json" });
|
|
224
|
+
res.end(JSON.stringify({
|
|
225
|
+
error: {
|
|
226
|
+
message: `Model ${model} does not support tool/function calls. Use a native API model (e.g. github-copilot/gpt-5-mini) for agents that need tools.`,
|
|
227
|
+
type: "invalid_request_error",
|
|
228
|
+
code: "tools_not_supported",
|
|
229
|
+
}
|
|
230
|
+
}));
|
|
231
|
+
return;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
opts.log(`[cli-bridge] ${model} · ${messages.length} msg(s) · stream=${stream}${hasTools ? " · tools=unsupported→rejected" : ""}`);
|
|
213
235
|
|
|
214
236
|
const id = `chatcmpl-cli-${randomBytes(6).toString("hex")}`;
|
|
215
237
|
const created = Math.floor(Date.now() / 1000);
|