@elvatis_com/openclaw-cli-bridge-elvatis 1.6.3 → 1.6.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, health testing, and model listing.
4
4
 
5
- **Current version:** `1.6.3`
5
+ **Current version:** `1.6.5`
6
6
 
7
7
  ---
8
8
 
@@ -362,9 +362,16 @@ npm test # vitest run (83 tests)
362
362
 
363
363
  ## Changelog
364
364
 
365
+ ### v1.6.5
366
+ - **feat:** Automatic session keep-alive — every 20h, active browser sessions are silently refreshed by navigating to the provider home page. Prevents cookie expiry on providers like ChatGPT (7-day sessions) without storing credentials.
367
+
368
+ ### v1.6.4
369
+ - **chore:** version bump (1.6.3 was already published on npm with partial changes)
370
+
365
371
  ### v1.6.3
366
- - **fix:** `/bridge-status` now uses cookie expiry files as source of truth (not in-memory context). Shows 🟡 "logged in, browser not loaded" instead of "not connected" after gateway restarts when cookies are still valid.
367
- - **fix:** Update ChatGPT web-session models to current lineup: added `gpt-4.1`, `gpt-5-mini`; renamed `gpt-o3`→`o3`, `gpt-o4-mini`→`o4-mini`; updated context window sizes.
372
+ - **fix:** CLI-proxy models (`cli-gemini/*`, `cli-claude/*`) now return HTTP 400 with `tools_not_supported` when a request includes tool/function call schemas — prevents agents from silently failing or hallucinating when assigned a CLI-proxy model
373
+ - **feat:** `/v1/models` response includes `capabilities.tools: false` for CLI-proxy models so OpenClaw can detect tool support upfront
374
+ - **fix:** EADDRINUSE on hot-reload: re-probe after 1.5s wait before retrying bind; probe timeout 800ms → 2000ms
368
375
 
369
376
  ### v1.6.2
370
377
  - **docs:** Add missing changelog entries (v1.5.1, v1.6.0, v1.6.1), fix /cli-codex54 command name in SKILL.md, add startup re-login alert description to SKILL.md.
package/SKILL.md CHANGED
@@ -64,4 +64,4 @@ On gateway restart, if any session has expired, a **WhatsApp alert** is sent aut
64
64
 
65
65
  See `README.md` for full configuration reference and architecture diagram.
66
66
 
67
- **Version:** 1.6.3
67
+ **Version:** 1.6.5
package/index.ts CHANGED
@@ -249,6 +249,9 @@ let _cdpBrowserLaunchPromise: Promise<import("playwright").BrowserContext | null
249
249
  // Set to true after first run; hot-reloads see true and skip the restore loop.
250
250
  let _startupRestoreDone = false;
251
251
 
252
+ // Session keep-alive interval — refreshes browser cookies every 20h
253
+ let _keepAliveInterval: ReturnType<typeof setInterval> | null = null;
254
+
252
255
  /**
253
256
  * Connect to the OpenClaw managed browser (CDP port 18800).
254
257
  * Singleton: reuses the same connection. Falls back to persistent Chromium for Grok only.
@@ -435,8 +438,46 @@ async function getOrLaunchChatGPTContext(
435
438
  return _chatgptLaunchPromise;
436
439
  }
437
440
 
441
+ /** Session keep-alive — navigate to provider home pages to refresh cookies */
442
+ async function sessionKeepAlive(log: (msg: string) => void): Promise<void> {
443
+ const providers: Array<{
444
+ name: string;
445
+ homeUrl: string;
446
+ getCtx: () => BrowserContext | null;
447
+ scanExpiry: (ctx: BrowserContext) => Promise<{ expiresAt: number; loginAt: number; cookieName: string } | null>;
448
+ saveExpiry: (info: { expiresAt: number; loginAt: number; cookieName: string }) => void;
449
+ }> = [
450
+ { name: "grok", homeUrl: "https://grok.com", getCtx: () => grokContext, scanExpiry: scanCookieExpiry, saveExpiry: saveGrokExpiry },
451
+ { name: "gemini", homeUrl: "https://gemini.google.com/app", getCtx: () => geminiContext, scanExpiry: scanGeminiCookieExpiry, saveExpiry: saveGeminiExpiry },
452
+ { name: "claude-web", homeUrl: "https://claude.ai/new", getCtx: () => claudeWebContext, scanExpiry: scanClaudeCookieExpiry, saveExpiry: saveClaudeExpiry },
453
+ { name: "chatgpt", homeUrl: "https://chatgpt.com", getCtx: () => chatgptContext, scanExpiry: scanChatGPTCookieExpiry, saveExpiry: saveChatGPTExpiry },
454
+ ];
455
+
456
+ for (const p of providers) {
457
+ const ctx = p.getCtx();
458
+ if (!ctx) continue;
459
+ try {
460
+ const page = await ctx.newPage();
461
+ await page.goto(p.homeUrl, { waitUntil: "domcontentloaded", timeout: 15_000 });
462
+ await new Promise(r => setTimeout(r, 4000));
463
+ await page.close();
464
+ const expiry = await p.scanExpiry(ctx);
465
+ if (expiry) p.saveExpiry(expiry);
466
+ log(`[cli-bridge:${p.name}] session keep-alive touch ✅`);
467
+ } catch (err) {
468
+ log(`[cli-bridge:${p.name}] session keep-alive failed: ${(err as Error).message}`);
469
+ }
470
+ // Sequential — avoid spawning multiple pages at once
471
+ await new Promise(r => setTimeout(r, 2000));
472
+ }
473
+ }
474
+
438
475
  /** Clean up all browser resources — call on plugin teardown */
439
476
  async function cleanupBrowsers(log: (msg: string) => void): Promise<void> {
477
+ if (_keepAliveInterval) {
478
+ clearInterval(_keepAliveInterval);
479
+ _keepAliveInterval = null;
480
+ }
440
481
  if (grokContext) {
441
482
  try { await grokContext.close(); } catch { /* ignore */ }
442
483
  grokContext = null;
@@ -854,7 +895,7 @@ function proxyTestRequest(
854
895
  const plugin = {
855
896
  id: "openclaw-cli-bridge-elvatis",
856
897
  name: "OpenClaw CLI Bridge",
857
- version: "1.6.2",
898
+ version: "1.6.5",
858
899
  description:
859
900
  "Phase 1: openai-codex auth bridge. " +
860
901
  "Phase 2: HTTP proxy for gemini/claude CLIs. " +
@@ -991,6 +1032,13 @@ const plugin = {
991
1032
  }
992
1033
  }
993
1034
  })();
1035
+
1036
+ // Start session keep-alive interval (every 20h)
1037
+ if (!_keepAliveInterval) {
1038
+ _keepAliveInterval = setInterval(() => {
1039
+ void sessionKeepAlive((msg) => api.logger.info(msg));
1040
+ }, 72_000_000);
1041
+ }
994
1042
  }
995
1043
 
996
1044
  // ── Phase 1: openai-codex auth bridge ─────────────────────────────────────
@@ -1067,7 +1115,7 @@ const plugin = {
1067
1115
  headers: { Authorization: `Bearer ${apiKey}` } },
1068
1116
  (res) => { res.resume(); resolve(res.statusCode === 200); }
1069
1117
  );
1070
- req.setTimeout(800, () => { req.destroy(); resolve(false); });
1118
+ req.setTimeout(2000, () => { req.destroy(); resolve(false); });
1071
1119
  req.on("error", () => resolve(false));
1072
1120
  req.end();
1073
1121
  });
@@ -1144,8 +1192,17 @@ const plugin = {
1144
1192
  } catch (err: unknown) {
1145
1193
  const msg = (err as Error).message ?? String(err);
1146
1194
  if (msg.includes("EADDRINUSE")) {
1147
- // Port is busy but probe didn't respond — wait for the OS to release it
1148
- api.logger.warn(`[cli-bridge] port ${port} busy, waiting 1s for OS release…`);
1195
+ // Port is busy but probe didn't respond — maybe the old server is still shutting down.
1196
+ // Re-probe first: if it now responds, reuse it without rebinding.
1197
+ api.logger.warn(`[cli-bridge] port ${port} busy — re-probing before retry…`);
1198
+ await new Promise(r => setTimeout(r, 1500));
1199
+ const aliveNow = await probeExisting();
1200
+ if (aliveNow) {
1201
+ api.logger.info(`[cli-bridge] proxy now responding on :${port} — reusing`);
1202
+ return;
1203
+ }
1204
+ // Still not responding — wait for OS to release the port, then rebind
1205
+ api.logger.warn(`[cli-bridge] port ${port} still busy, waiting 1s for OS release…`);
1149
1206
  await new Promise((r) => setTimeout(r, 1000));
1150
1207
  // One final attempt
1151
1208
  try {
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "id": "openclaw-cli-bridge-elvatis",
3
3
  "name": "OpenClaw CLI Bridge",
4
- "version": "1.6.3",
4
+ "version": "1.6.5",
5
5
  "license": "MIT",
6
6
  "description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
7
7
  "providers": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elvatis_com/openclaw-cli-bridge-elvatis",
3
- "version": "1.6.3",
3
+ "version": "1.6.5",
4
4
  "description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
5
5
  "type": "module",
6
6
  "openclaw": {
package/src/cli-runner.ts CHANGED
@@ -316,8 +316,20 @@ export const DEFAULT_ALLOWED_CLI_MODELS: ReadonlySet<string> = new Set([
316
316
  "cli-gemini/gemini-2.5-flash",
317
317
  "cli-gemini/gemini-3-pro-preview",
318
318
  "cli-gemini/gemini-3-flash-preview",
319
+ // Aliases (map to preview variants internally)
320
+ "cli-gemini/gemini-3-pro", // alias → gemini-3-pro-preview
321
+ "cli-gemini/gemini-3-flash", // alias → gemini-3-flash-preview
319
322
  ]);
320
323
 
324
+ /** Normalize model aliases to their canonical CLI model names. */
325
+ function normalizeModelAlias(normalized: string): string {
326
+ const ALIASES: Record<string, string> = {
327
+ "cli-gemini/gemini-3-pro": "cli-gemini/gemini-3-pro-preview",
328
+ "cli-gemini/gemini-3-flash": "cli-gemini/gemini-3-flash-preview",
329
+ };
330
+ return ALIASES[normalized] ?? normalized;
331
+ }
332
+
321
333
  // ──────────────────────────────────────────────────────────────────────────────
322
334
  // Router
323
335
  // ──────────────────────────────────────────────────────────────────────────────
@@ -364,8 +376,11 @@ export async function routeToCliRunner(
364
376
  );
365
377
  }
366
378
 
367
- if (normalized.startsWith("cli-gemini/")) return runGemini(prompt, normalized, timeoutMs);
368
- if (normalized.startsWith("cli-claude/")) return runClaude(prompt, normalized, timeoutMs);
379
+ // Resolve aliases (e.g. gemini-3-pro → gemini-3-pro-preview) after allowlist check
380
+ const resolved = normalizeModelAlias(normalized);
381
+
382
+ if (resolved.startsWith("cli-gemini/")) return runGemini(prompt, resolved, timeoutMs);
383
+ if (resolved.startsWith("cli-claude/")) return runClaude(prompt, resolved, timeoutMs);
369
384
 
370
385
  throw new Error(
371
386
  `Unknown CLI bridge model: "${model}". Use "vllm/cli-gemini/<model>" or "vllm/cli-claude/<model>".`
@@ -72,6 +72,7 @@ export const CLI_MODELS = [
72
72
  { id: "cli-gemini/gemini-2.5-pro", name: "Gemini 2.5 Pro (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
73
73
  { id: "cli-gemini/gemini-2.5-flash", name: "Gemini 2.5 Flash (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
74
74
  { id: "cli-gemini/gemini-3-pro-preview",name: "Gemini 3 Pro (CLI)", contextWindow: 1_000_000, maxTokens: 8_192 },
75
+ { id: "cli-gemini/gemini-3-pro", name: "Gemini 3 Pro (CLI, alias)", contextWindow: 1_000_000, maxTokens: 8_192 },
75
76
  // Grok web-session models (requires /grok-login)
76
77
  { id: "web-grok/grok-3", name: "Grok 3 (web session)", contextWindow: 131_072, maxTokens: 131_072 },
77
78
  { id: "web-grok/grok-3-fast", name: "Grok 3 Fast (web session)", contextWindow: 131_072, maxTokens: 131_072 },
@@ -167,6 +168,10 @@ async function handleRequest(
167
168
  object: "model",
168
169
  created: now,
169
170
  owned_by: "openclaw-cli-bridge",
171
+ // CLI-proxy models stream plain text — no tool/function call support
172
+ capabilities: {
173
+ tools: !(m.id.startsWith("cli-gemini/") || m.id.startsWith("cli-claude/")),
174
+ },
170
175
  })),
171
176
  })
172
177
  );
@@ -201,7 +206,8 @@ async function handleRequest(
201
206
  return;
202
207
  }
203
208
 
204
- const { model, messages, stream = false } = parsed;
209
+ const { model, messages, stream = false } = parsed as { model: string; messages: ChatMessage[]; stream?: boolean; tools?: unknown };
210
+ const hasTools = Array.isArray((parsed as { tools?: unknown }).tools) && (parsed as { tools?: unknown[] }).tools!.length > 0;
205
211
 
206
212
  if (!model || !messages?.length) {
207
213
  res.writeHead(400, { "Content-Type": "application/json" });
@@ -209,7 +215,23 @@ async function handleRequest(
209
215
  return;
210
216
  }
211
217
 
212
- opts.log(`[cli-bridge] ${model} · ${messages.length} msg(s) · stream=${stream}`);
218
+ // CLI-proxy models (cli-gemini/*, cli-claude/*) are plain text completions —
219
+ // they cannot process tool/function call schemas. Return a clear 400 so
220
+ // OpenClaw can surface a meaningful error instead of getting a garbled response.
221
+ const isCliModel = model.startsWith("cli-gemini/") || model.startsWith("cli-claude/");
222
+ if (hasTools && isCliModel) {
223
+ res.writeHead(400, { "Content-Type": "application/json" });
224
+ res.end(JSON.stringify({
225
+ error: {
226
+ message: `Model ${model} does not support tool/function calls. Use a native API model (e.g. github-copilot/gpt-5-mini) for agents that need tools.`,
227
+ type: "invalid_request_error",
228
+ code: "tools_not_supported",
229
+ }
230
+ }));
231
+ return;
232
+ }
233
+
234
+ opts.log(`[cli-bridge] ${model} · ${messages.length} msg(s) · stream=${stream}${hasTools ? " · tools=unsupported→rejected" : ""}`);
213
235
 
214
236
  const id = `chatcmpl-cli-${randomBytes(6).toString("hex")}`;
215
237
  const created = Math.floor(Date.now() / 1000);