@elvatis_com/openclaw-cli-bridge-elvatis 0.2.17 → 0.2.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,13 +2,13 @@
2
2
 
3
3
  _Last updated: 2026-03-08 by Akido (claude-sonnet-4-6)_
4
4
 
5
- ## Current Version: 0.2.17 — STABLE
5
+ ## Current Version: 0.2.19 — STABLE
6
6
 
7
7
  ## What is done
8
8
 
9
9
  - ✅ Repo: `https://github.com/elvatis/openclaw-cli-bridge-elvatis`
10
- - ✅ npm: `@@elvatis_com/openclaw-cli-bridge-elvatis@0.2.17`
11
- - ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.17`
10
+ - ✅ npm: `@elvatis_com/openclaw-cli-bridge-elvatis@0.2.19`
11
+ - ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.19`
12
12
  - ✅ Phase 1: `openai-codex` provider via `~/.codex/auth.json` (no re-login)
13
13
  - ✅ Phase 2: Local OpenAI-compatible proxy on `127.0.0.1:31337` (Gemini + Claude CLI)
14
14
  - ✅ Phase 3: 10 slash commands (`/cli-sonnet`, `/cli-opus`, `/cli-haiku`, `/cli-gemini`, `/cli-gemini-flash`, `/cli-gemini3`, `/cli-codex`, `/cli-codex-mini`, `/cli-back`, `/cli-test`)
package/README.md CHANGED
@@ -1,8 +1,8 @@
1
1
  # openclaw-cli-bridge-elvatis
2
2
 
3
- > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, and health testing.
3
+ > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, health testing, and model listing.
4
4
 
5
- **Current version:** `0.2.17`
5
+ **Current version:** `0.2.19`
6
6
 
7
7
  ---
8
8
 
@@ -20,13 +20,15 @@ Starts a local OpenAI-compatible HTTP proxy on `127.0.0.1:31337` and configures
20
20
  |---|---|---|
21
21
  | `vllm/cli-gemini/gemini-2.5-pro` | `gemini -m gemini-2.5-pro -p ""` (stdin, cwd=/tmp) | ~8–10s |
22
22
  | `vllm/cli-gemini/gemini-2.5-flash` | `gemini -m gemini-2.5-flash -p ""` (stdin, cwd=/tmp) | ~4–6s |
23
- | `vllm/cli-gemini/gemini-3-pro` | `gemini -m gemini-3-pro -p ""` (stdin, cwd=/tmp) | ~8–10s |
23
+ | `vllm/cli-gemini/gemini-3-pro-preview` | `gemini -m gemini-3-pro-preview -p ""` (stdin, cwd=/tmp) | ~8–10s |
24
+ | `vllm/cli-gemini/gemini-3-flash-preview` | `gemini -m gemini-3-flash-preview -p ""` (stdin, cwd=/tmp) | ~4–6s |
24
25
  | `vllm/cli-claude/claude-sonnet-4-6` | `claude -p --output-format text --model claude-sonnet-4-6` (stdin) | ~2–4s |
25
26
  | `vllm/cli-claude/claude-opus-4-6` | `claude -p --output-format text --model claude-opus-4-6` (stdin) | ~3–5s |
26
27
  | `vllm/cli-claude/claude-haiku-4-5` | `claude -p --output-format text --model claude-haiku-4-5` (stdin) | ~1–3s |
27
28
 
28
29
  ### Phase 3 — Slash commands
29
- Ten plugin-registered commands (all `requireAuth: true`):
30
+
31
+ All commands use gateway-level `commands.allowFrom` for authorization (`requireAuth: false` at plugin level).
30
32
 
31
33
  **Claude Code CLI** (routed via local proxy on `:31337`):
32
34
 
@@ -42,14 +44,18 @@ Ten plugin-registered commands (all `requireAuth: true`):
42
44
  |---|---|
43
45
  | `/cli-gemini` | `vllm/cli-gemini/gemini-2.5-pro` |
44
46
  | `/cli-gemini-flash` | `vllm/cli-gemini/gemini-2.5-flash` |
45
- | `/cli-gemini3` | `vllm/cli-gemini/gemini-3-pro` |
47
+ | `/cli-gemini3` | `vllm/cli-gemini/gemini-3-pro-preview` |
48
+ | `/cli-gemini3-flash` | `vllm/cli-gemini/gemini-3-flash-preview` |
46
49
 
47
- **Codex CLI** (via `openai-codex` provider — Codex CLI OAuth auth, calls OpenAI API directly, **not** through the local proxy):
50
+ **Codex CLI** (via `openai-codex` provider — OAuth auth, calls OpenAI API directly, **not** through the local proxy):
48
51
 
49
- | Command | Model |
50
- |---|---|
51
- | `/cli-codex` | `openai-codex/gpt-5.3-codex` |
52
- | `/cli-codex-mini` | `openai-codex/gpt-5.1-codex-mini` |
52
+ | Command | Model | Notes |
53
+ |---|---|---|
54
+ | `/cli-codex` | `openai-codex/gpt-5.3-codex` | ✅ Tested |
55
+ | `/cli-codex-spark` | `openai-codex/gpt-5.3-codex-spark` | |
56
+ | `/cli-codex52` | `openai-codex/gpt-5.2-codex` | |
57
+ | `/cli-codex54` | `openai-codex/gpt-5.4` | May require upgraded OAuth scope |
58
+ | `/cli-codex-mini` | `openai-codex/gpt-5.1-codex-mini` | ✅ Tested |
53
59
 
54
60
  **Utility:**
55
61
 
@@ -57,6 +63,7 @@ Ten plugin-registered commands (all `requireAuth: true`):
57
63
  |---|---|
58
64
  | `/cli-back` | Restore the model active **before** the last `/cli-*` switch |
59
65
  | `/cli-test [model]` | One-shot proxy health check — **does NOT switch your active model** |
66
+ | `/cli-list` | Show all registered CLI bridge models with commands |
60
67
 
61
68
  **`/cli-back` details:**
62
69
  - Before every `/cli-*` switch the current model is saved to `~/.openclaw/cli-bridge-state.json`
@@ -68,6 +75,10 @@ Ten plugin-registered commands (all `requireAuth: true`):
68
75
  - Default when no arg given: `cli-claude/claude-sonnet-4-6`
69
76
  - Reports response content, latency, and confirms your active model is unchanged
70
77
 
78
+ **`/cli-list` details:**
79
+ - Lists all registered models grouped by provider (Claude CLI, Gemini CLI, Codex)
80
+ - No arguments required
81
+
71
82
  ---
72
83
 
73
84
  ## Requirements
@@ -111,8 +122,10 @@ openclaw gateway restart
111
122
 
112
123
  ```
113
124
  [cli-bridge] proxy ready on :31337
114
- [cli-bridge] registered 8 commands: /cli-sonnet, /cli-opus, /cli-haiku,
115
- /cli-gemini, /cli-gemini-flash, /cli-gemini3, /cli-back, /cli-test
125
+ [cli-bridge] registered 15 commands: /cli-sonnet, /cli-opus, /cli-haiku,
126
+ /cli-gemini, /cli-gemini-flash, /cli-gemini3, /cli-gemini3-flash,
127
+ /cli-codex, /cli-codex-spark, /cli-codex52, /cli-codex54, /cli-codex-mini,
128
+ /cli-back, /cli-test, /cli-list
116
129
  ```
117
130
 
118
131
  ### 3. Register Codex auth (optional — Phase 1 only)
@@ -122,7 +135,39 @@ openclaw models auth login --provider openai-codex
122
135
  # Select: "Codex CLI (existing login)"
123
136
  ```
124
137
 
125
- ### 4. Test without switching your model
138
+ ### 4. List available models
139
+
140
+ ```
141
+ /cli-list
142
+ → 🤖 CLI Bridge Models
143
+
144
+ Claude Code CLI
145
+ /cli-sonnet claude-sonnet-4-6
146
+ /cli-opus claude-opus-4-6
147
+ /cli-haiku claude-haiku-4-5
148
+
149
+ Gemini CLI
150
+ /cli-gemini gemini-2.5-pro
151
+ /cli-gemini-flash gemini-2.5-flash
152
+ /cli-gemini3 gemini-3-pro-preview
153
+ /cli-gemini3-flash gemini-3-flash-preview
154
+
155
+ Codex (OAuth)
156
+ /cli-codex gpt-5.3-codex
157
+ /cli-codex-spark gpt-5.3-codex-spark
158
+ /cli-codex52 gpt-5.2-codex
159
+ /cli-codex54 gpt-5.4
160
+ /cli-codex-mini gpt-5.1-codex-mini
161
+
162
+ Utility
163
+ /cli-back Restore previous model
164
+ /cli-test [model] Health check (no model switch)
165
+ /cli-list This overview
166
+
167
+ Proxy: 127.0.0.1:31337
168
+ ```
169
+
170
+ ### 5. Test without switching your model
126
171
 
127
172
  ```
128
173
  /cli-test
@@ -140,7 +185,7 @@ openclaw models auth login --provider openai-codex
140
185
  Active model unchanged: anthropic/claude-sonnet-4-6
141
186
  ```
142
187
 
143
- ### 5. Switch and restore
188
+ ### 6. Switch and restore
144
189
 
145
190
  ```
146
191
  /cli-sonnet
@@ -173,15 +218,22 @@ In `~/.openclaw/openclaw.json` → `plugins.entries.openclaw-cli-bridge-elvatis.
173
218
 
174
219
  ---
175
220
 
221
+ ## Model Allowlist
222
+
223
+ `routeToCliRunner` enforces `DEFAULT_ALLOWED_CLI_MODELS` — only models registered in the plugin are accepted by the proxy. Unregistered models receive a clear error listing allowed options.
224
+
225
+ To disable the check (e.g. for custom vllm routing): pass `allowedModels: null` in `RouteOptions`.
226
+
227
+ ---
228
+
176
229
  ## Architecture
177
230
 
178
231
  ```
179
232
  OpenClaw agent
180
233
 
181
234
  ├─ openai-codex/* ──────────────────────────► OpenAI API (direct)
182
- │ auth: ~/.codex/auth.json OAuth tokens
183
-
184
- │ /cli-codex, /cli-codex-mini ─────────────────┘ (switch to this provider)
235
+ │ auth: ~/.codex/auth.json OAuth tokens
236
+ /cli-codex, /cli-codex-spark, /cli-codex52, /cli-codex54, /cli-codex-mini
185
237
 
186
238
  └─ vllm/cli-gemini/* ─┐
187
239
  vllm/cli-claude/* ─┤─► localhost:31337 (openclaw-cli-bridge proxy)
@@ -190,21 +242,16 @@ OpenClaw agent
190
242
  │ │ (neutral cwd prevents agentic mode)
191
243
  │ └─ cli-claude/* → claude -p --model <model>
192
244
  │ stdin=prompt
193
- └───────────────────────────────────────────────────
194
245
 
195
- Slash commands (bypass agent, requireAuth=true):
196
- /cli-sonnet|opus|haiku|gemini|gemini-flash|gemini3|codex|codex-mini
246
+ Slash commands (requireAuth=false, gateway commands.allowFrom is the auth layer):
247
+ /cli-sonnet|opus|haiku|gemini|gemini-flash|gemini3|gemini3-flash
248
+ /cli-codex|codex-spark|codex52|codex54|codex-mini
197
249
  └─► saves current model → ~/.openclaw/cli-bridge-state.json
198
- └─► openclaw models set <model> (~1s, atomic)
250
+ └─► openclaw models set <model>
199
251
 
200
- /cli-back
201
- └─► reads ~/.openclaw/cli-bridge-state.json
202
- └─► openclaw models set <previous>
203
-
204
- /cli-test [model]
205
- └─► HTTP POST → localhost:31337 (no global model change)
206
- └─► reports response + latency
207
- └─► NOTE: only tests the proxy — Codex models bypass the proxy
252
+ /cli-back → reads state file, restores previous model, clears state
253
+ /cli-test → HTTP POST → localhost:31337, no global model change
254
+ /cli-list → formatted table of all registered models
208
255
  ```
209
256
 
210
257
  ---
@@ -218,8 +265,8 @@ Slash commands (bypass agent, requireAuth=true):
218
265
 
219
266
  ### Gemini agentic mode / hangs (fixed in v0.2.4)
220
267
  **Symptom:** Gemini hangs, returns wrong answers, or says "directory does not exist".
221
- **Cause:** `@file` syntax (`gemini -p @/tmp/xxx.txt`) triggers agentic mode — Gemini scans the working directory for project context and treats prompts as task instructions. Running from the workspace root makes this worse.
222
- **Fix:** Stdin delivery (`gemini -p ""` with prompt via stdin) + `cwd=/tmp`. Same pattern as Claude.
268
+ **Cause:** `@file` syntax (`gemini -p @/tmp/xxx.txt`) triggers agentic mode — Gemini scans the working directory for project context and treats prompts as task instructions.
269
+ **Fix:** Stdin delivery (`gemini -p ""` with prompt via stdin) + `cwd=/tmp`.
223
270
 
224
271
  ---
225
272
 
@@ -227,82 +274,66 @@ Slash commands (bypass agent, requireAuth=true):
227
274
 
228
275
  ```bash
229
276
  npm run typecheck # tsc --noEmit
230
- npm test # vitest run (5 unit tests for formatPrompt)
277
+ npm test # vitest run (45 tests)
231
278
  ```
232
279
 
233
280
  ---
234
281
 
235
282
  ## Changelog
236
283
 
284
+ ### v0.2.19
285
+ - **feat:** `/cli-list` command — formatted overview of all registered models grouped by provider
286
+ - **docs:** Rewrite README to reflect current state (correct model names, command count, requireAuth, test count, /cli-list docs)
287
+
288
+ ### v0.2.18
289
+ - **feat:** Add `/cli-gemini3-flash` → `gemini-3-flash-preview`
290
+ - **feat:** Add `/cli-codex-spark` → `gpt-5.3-codex-spark`, `/cli-codex52` → `gpt-5.2-codex`, `/cli-codex54` → `gpt-5.4`
291
+ - **fix:** Update `DEFAULT_ALLOWED_CLI_MODELS` with `gemini-3-flash-preview`
292
+
237
293
  ### v0.2.17
238
- - **fix:** `/cli-gemini3` model corrected to `gemini-3-pro-preview` (was `gemini-3-pro`, which returns 404 from Gemini API)
294
+ - **fix:** `/cli-gemini3` model corrected to `gemini-3-pro-preview` (was `gemini-3-pro`, returns 404 from Gemini API)
239
295
 
240
296
  ### v0.2.16
241
297
  - **feat(T-101):** Expand test suite to 45 tests — new cases for `formatPrompt` (mixed roles, boundary values, system messages) and `routeToCliRunner` (gemini paths, edge cases)
242
298
  - **feat(T-103):** Add `DEFAULT_ALLOWED_CLI_MODELS` allowlist; `routeToCliRunner` now rejects unregistered models by default; pass `allowedModels: null` to opt out
243
299
 
244
300
  ### v0.2.15
245
- - **docs:** Rewrite changelog (entries for v0.2.12–v0.2.14 were corrupted by repeated sed version bumps); all providers verified working (Claude, Gemini, Codex)
246
- - **docs:** Update STATUS.md with end-to-end test results
301
+ - **docs:** Rewrite changelog (entries for v0.2.12–v0.2.14 were corrupted); all providers verified working end-to-end
247
302
 
248
303
  ### v0.2.14
249
- - **fix:** Strip `vllm/` prefix in `routeToCliRunner` — OpenClaw sends full provider path (`vllm/cli-claude/...`) but proxy router expected bare `cli-claude/...`; caused "Unknown CLI bridge model" on all requests
250
- - **test:** Add 4 routing tests covering both prefixed and non-prefixed model paths (9 tests total)
304
+ - **fix:** Strip `vllm/` prefix in `routeToCliRunner` — OpenClaw sends full provider path (`vllm/cli-claude/...`) but proxy router expected bare `cli-claude/...`
305
+ - **test:** Add 4 routing tests (9 total)
251
306
 
252
307
  ### v0.2.13
253
- - **fix:** Set `requireAuth: false` on all `/cli-*` commands — webchat senders were always blocked because plugin-level auth uses a different resolution path than `commands.allowFrom` config; gateway-level allowlist is the correct security layer
254
- - **fix:** Hardcoded `version: "0.2.5"` in plugin object (`index.ts`) — now tracks `package.json`
255
-
256
- ### v0.2.12
257
- - **docs:** Fix changelog continuity — v0.2.10 entry was lost, v0.2.11 description was wrong; all entries now accurate
258
-
259
- ### v0.2.11
260
- - **docs:** Fix README `Current version:** `0.2.17`
261
-
262
- ### v0.2.10
263
- - **docs:** Fix version labels — SKILL.md was showing 0.2.2, README changelog ended at v0.2.5; add entries for v0.2.6–v0.2.9
308
+ - **fix:** Set `requireAuth: false` on all `/cli-*` commands — plugin-level auth uses different resolution path than `commands.allowFrom`; gateway allowlist is the correct security layer
309
+ - **fix:** Hardcoded `version: "0.2.5"` in plugin object now tracks `package.json`
264
310
 
265
311
  ### v0.2.9
266
- - **fix:** Critical — replace `fuser -k 31337/tcp` with safe health probe (`GET /v1/models`)
267
- - Prevents gateway SIGKILL on in-process hot-reloads (systemd `status=9/KILL` was caused by `fuser` finding gateway itself holding the port)
268
- - If proxy responds → reuse it; if EADDRINUSE but no response → wait 1s, retry once
269
-
270
- ### v0.2.8
271
- - **fix:** EADDRINUSE on every gateway restart — `closeAllConnections()` + `registerService` stop() hook (partially; superseded by v0.2.9 health-probe approach)
312
+ - **fix:** Critical — replace `fuser -k 31337/tcp` with safe health probe to prevent gateway SIGKILL on hot-reloads
272
313
 
273
- ### v0.2.7
274
- - **fix:** Port leak on gateway hot-reload — added `registerService` stop() callback to close proxy server on plugin teardown
314
+ ### v0.2.7–v0.2.8
315
+ - **fix:** Port leak on hot-reload — `registerService` stop() hook + `closeAllConnections()`
275
316
 
276
317
  ### v0.2.6
277
- - **fix:** `openclaw.extensions` added to `package.json` (required for `openclaw plugins install`)
278
- - Config patcher: auto-adds vllm provider to `openclaw.json` on first startup
318
+ - **fix:** `openclaw.extensions` added to `package.json`; config patcher auto-adds vllm provider
279
319
 
280
320
  ### v0.2.5
281
- - **feat:** `/cli-codex` `openai-codex/gpt-5.3-codex`
282
- - **feat:** `/cli-codex-mini` → `openai-codex/gpt-5.1-codex-mini`
283
- - Codex commands use the `openai-codex` provider (Codex CLI OAuth auth, direct OpenAI API — not the local proxy)
321
+ - **feat:** `/cli-codex` + `/cli-codex-mini` (Codex OAuth provider, direct API)
284
322
 
285
323
  ### v0.2.4
286
- - **fix:** Gemini agentic mode — replaced `@file` with stdin delivery (`-p ""`) + `cwd=/tmp`
287
- - **fix:** Filter `[WARN]` and `Loaded cached credentials` noise from Gemini stderr
288
- - Added `RunCliOptions` interface with optional `cwd` field
324
+ - **fix:** Gemini agentic mode — stdin delivery + `cwd=/tmp`
289
325
 
290
326
  ### v0.2.3
291
- - **feat:** `/cli-back` restore previous model (state persisted in `~/.openclaw/cli-bridge-state.json`)
292
- - **feat:** `/cli-test [model]` — one-shot proxy health check without changing active model
327
+ - **feat:** `/cli-back` + `/cli-test`
293
328
 
294
329
  ### v0.2.2
295
- - **feat:** Phase 3 — `/cli-*` slash commands for instant model switching
296
- - All 6 model commands via `api.registerCommand` with `requireAuth: true`
330
+ - **feat:** Phase 3 — `/cli-*` slash commands
297
331
 
298
332
  ### v0.2.1
299
- - **fix:** `spawn E2BIG` `buildMinimalEnv()` instead of spreading full `process.env`
300
- - **feat:** Unit tests (`test/cli-runner.test.ts`)
333
+ - **fix:** `spawn E2BIG` + unit tests
301
334
 
302
335
  ### v0.2.0
303
- - **feat:** Phase 2 — local OpenAI-compatible proxy server
304
- - Stdin prompt delivery, `MAX_MESSAGES=20` + `MAX_MSG_CHARS=4000` truncation
305
- - Auto-patch of `openclaw.json` vllm provider config
336
+ - **feat:** Phase 2 — local OpenAI-compatible proxy, stdin delivery, prompt truncation
306
337
 
307
338
  ### v0.1.x
308
339
  - Phase 1: Codex CLI OAuth auth bridge
package/SKILL.md CHANGED
@@ -53,4 +53,4 @@ Each command runs `openclaw models set <model>` atomically and replies with a co
53
53
 
54
54
  See `README.md` for full configuration reference and architecture diagram.
55
55
 
56
- **Version:** 0.2.17
56
+ **Version:** 0.2.19
package/index.ts CHANGED
@@ -14,11 +14,13 @@
14
14
  * /cli-haiku → vllm/cli-claude/claude-haiku-4-5 (Claude Code CLI proxy)
15
15
  * /cli-gemini → vllm/cli-gemini/gemini-2.5-pro (Gemini CLI proxy)
16
16
  * /cli-gemini-flash → vllm/cli-gemini/gemini-2.5-flash (Gemini CLI proxy)
17
- * /cli-gemini3 → vllm/cli-gemini/gemini-3-pro-preview (Gemini CLI proxy)
17
+ * /cli-gemini3 → vllm/cli-gemini/gemini-3-pro-preview (Gemini CLI proxy)
18
+ * /cli-gemini3-flash → vllm/cli-gemini/gemini-3-flash-preview (Gemini CLI proxy)
18
19
  * /cli-codex → openai-codex/gpt-5.3-codex (Codex CLI OAuth, direct API)
19
20
  * /cli-codex-mini → openai-codex/gpt-5.1-codex-mini (Codex CLI OAuth, direct API)
20
21
  * /cli-back → restore model that was active before last /cli-* switch
21
22
  * /cli-test [model] → one-shot proxy health check (does NOT switch global model)
23
+ * /cli-list → list all registered CLI bridge models with commands
22
24
  *
23
25
  * Provider / model naming:
24
26
  * vllm/cli-gemini/gemini-2.5-pro → `gemini -m gemini-2.5-pro @<tmpfile>`
@@ -153,8 +155,14 @@ const CLI_MODEL_COMMANDS = [
153
155
  {
154
156
  name: "cli-gemini3",
155
157
  model: "vllm/cli-gemini/gemini-3-pro-preview",
156
- description: "Switch to Gemini 3 Pro (Gemini CLI via local proxy)",
157
- label: "Gemini 3 Pro (CLI)",
158
+ description: "Switch to Gemini 3 Pro Preview (Gemini CLI via local proxy)",
159
+ label: "Gemini 3 Pro Preview (CLI)",
160
+ },
161
+ {
162
+ name: "cli-gemini3-flash",
163
+ model: "vllm/cli-gemini/gemini-3-flash-preview",
164
+ description: "Switch to Gemini 3 Flash Preview (Gemini CLI via local proxy)",
165
+ label: "Gemini 3 Flash Preview (CLI)",
158
166
  },
159
167
  // ── Codex (via openai-codex provider — Codex CLI OAuth auth, direct API) ────
160
168
  {
@@ -163,6 +171,24 @@ const CLI_MODEL_COMMANDS = [
163
171
  description: "Switch to GPT-5.3 Codex (openai-codex provider, Codex CLI auth)",
164
172
  label: "GPT-5.3 Codex",
165
173
  },
174
+ {
175
+ name: "cli-codex-spark",
176
+ model: "openai-codex/gpt-5.3-codex-spark",
177
+ description: "Switch to GPT-5.3 Codex Spark (openai-codex provider, Codex CLI auth)",
178
+ label: "GPT-5.3 Codex Spark",
179
+ },
180
+ {
181
+ name: "cli-codex52",
182
+ model: "openai-codex/gpt-5.2-codex",
183
+ description: "Switch to GPT-5.2 Codex (openai-codex provider, Codex CLI auth)",
184
+ label: "GPT-5.2 Codex",
185
+ },
186
+ {
187
+ name: "cli-codex54",
188
+ model: "openai-codex/gpt-5.4",
189
+ description: "Switch to GPT-5.4 (openai-codex provider, Codex CLI auth — may require upgraded scope)",
190
+ label: "GPT-5.4 (Codex)",
191
+ },
166
192
  {
167
193
  name: "cli-codex-mini",
168
194
  model: "openai-codex/gpt-5.1-codex-mini",
@@ -550,10 +576,51 @@ const plugin = {
550
576
  },
551
577
  } satisfies OpenClawPluginCommandDefinition);
552
578
 
579
+ // ── Phase 3d: /cli-list — formatted model overview ────────────────────────
580
+ api.registerCommand({
581
+ name: "cli-list",
582
+ description: "List all registered CLI bridge models and their commands.",
583
+ requireAuth: false,
584
+ handler: async (): Promise<PluginCommandResult> => {
585
+ const groups: Record<string, { cmd: string; model: string }[]> = {
586
+ "Claude Code CLI": [],
587
+ "Gemini CLI": [],
588
+ "Codex (OAuth)": [],
589
+ };
590
+
591
+ for (const c of CLI_MODEL_COMMANDS) {
592
+ const entry = { cmd: `/${c.name}`, model: c.model };
593
+ if (c.model.startsWith("vllm/cli-claude/")) groups["Claude Code CLI"].push(entry);
594
+ else if (c.model.startsWith("vllm/cli-gemini/")) groups["Gemini CLI"].push(entry);
595
+ else groups["Codex (OAuth)"].push(entry);
596
+ }
597
+
598
+ const lines: string[] = ["🤖 *CLI Bridge Models*", ""];
599
+ for (const [group, entries] of Object.entries(groups)) {
600
+ if (entries.length === 0) continue;
601
+ lines.push(`*${group}*`);
602
+ for (const { cmd, model } of entries) {
603
+ const modelId = model.replace(/^vllm\/cli-(claude|gemini)\//, "").replace(/^openai-codex\//, "");
604
+ lines.push(` ${cmd.padEnd(20)} ${modelId}`);
605
+ }
606
+ lines.push("");
607
+ }
608
+ lines.push("*Utility*");
609
+ lines.push(" /cli-back Restore previous model");
610
+ lines.push(" /cli-test [model] Health check (no model switch)");
611
+ lines.push(" /cli-list This overview");
612
+ lines.push("");
613
+ lines.push(`Proxy: \`127.0.0.1:${port}\``);
614
+
615
+ return { text: lines.join("\n") };
616
+ },
617
+ } satisfies OpenClawPluginCommandDefinition);
618
+
553
619
  const allCommands = [
554
620
  ...CLI_MODEL_COMMANDS.map((c) => `/${c.name}`),
555
621
  "/cli-back",
556
622
  "/cli-test",
623
+ "/cli-list",
557
624
  ];
558
625
  api.logger.info(`[cli-bridge] registered ${allCommands.length} commands: ${allCommands.join(", ")}`);
559
626
  },
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "id": "openclaw-cli-bridge-elvatis",
3
3
  "name": "OpenClaw CLI Bridge",
4
- "version": "0.2.17",
4
+ "version": "0.2.19",
5
5
  "description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
6
6
  "providers": [
7
7
  "openai-codex"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elvatis_com/openclaw-cli-bridge-elvatis",
3
- "version": "0.2.17",
3
+ "version": "0.2.19",
4
4
  "description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
5
5
  "type": "module",
6
6
  "openclaw": {
package/src/cli-runner.ts CHANGED
@@ -259,6 +259,7 @@ export const DEFAULT_ALLOWED_CLI_MODELS: ReadonlySet<string> = new Set([
259
259
  "cli-gemini/gemini-2.5-pro",
260
260
  "cli-gemini/gemini-2.5-flash",
261
261
  "cli-gemini/gemini-3-pro-preview",
262
+ "cli-gemini/gemini-3-flash-preview",
262
263
  ]);
263
264
 
264
265
  // ──────────────────────────────────────────────────────────────────────────────