@elvatis_com/openclaw-cli-bridge-elvatis 0.2.4 → 0.2.6

package/README.md CHANGED
@@ -1,8 +1,8 @@
  # openclaw-cli-bridge-elvatis
 
- > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching.
+ > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, and health testing.
 
- **Current version:** `0.2.2`
+ **Current version:** `0.2.5`
 
  ---
 
@@ -14,30 +14,59 @@ Registers the `openai-codex` provider by reading OAuth tokens already stored by
  ### Phase 2 — Request bridge (local proxy)
  Starts a local OpenAI-compatible HTTP proxy on `127.0.0.1:31337` and configures OpenClaw's `vllm` provider to route calls through `gemini` and `claude` CLI subprocesses.
 
- Prompt delivery: always via **stdin** (not CLI args) — avoids `E2BIG` for long sessions. Each message batch is truncated to the last 20 messages + system message (configurable in `src/cli-runner.ts`).
+ **Prompt delivery:** always via **stdin** (never CLI args or `@file`) — avoids `E2BIG` on long sessions and keeps Gemini out of agentic mode. Each message batch is truncated to the last 20 messages + system message (`MAX_MESSAGES`/`MAX_MSG_CHARS` in `src/cli-runner.ts`).
 
- | Model reference | CLI invoked |
- |---|---|
- | `vllm/cli-gemini/gemini-2.5-pro` | `gemini -m gemini-2.5-pro @<tmpfile>` |
- | `vllm/cli-gemini/gemini-2.5-flash` | `gemini -m gemini-2.5-flash @<tmpfile>` |
- | `vllm/cli-gemini/gemini-3-pro` | `gemini -m gemini-3-pro @<tmpfile>` |
- | `vllm/cli-claude/claude-sonnet-4-6` | `claude -p --output-format text --model claude-sonnet-4-6` (stdin) |
- | `vllm/cli-claude/claude-opus-4-6` | `claude -p --output-format text --model claude-opus-4-6` (stdin) |
- | `vllm/cli-claude/claude-haiku-4-5` | `claude -p --output-format text --model claude-haiku-4-5` (stdin) |
+ | Model reference | CLI invoked | Latency |
+ |---|---|---|
+ | `vllm/cli-gemini/gemini-2.5-pro` | `gemini -m gemini-2.5-pro -p ""` (stdin, cwd=/tmp) | ~8–10s |
+ | `vllm/cli-gemini/gemini-2.5-flash` | `gemini -m gemini-2.5-flash -p ""` (stdin, cwd=/tmp) | ~4–6s |
+ | `vllm/cli-gemini/gemini-3-pro` | `gemini -m gemini-3-pro -p ""` (stdin, cwd=/tmp) | ~8–10s |
+ | `vllm/cli-claude/claude-sonnet-4-6` | `claude -p --output-format text --model claude-sonnet-4-6` (stdin) | ~2–4s |
+ | `vllm/cli-claude/claude-opus-4-6` | `claude -p --output-format text --model claude-opus-4-6` (stdin) | ~3–5s |
+ | `vllm/cli-claude/claude-haiku-4-5` | `claude -p --output-format text --model claude-haiku-4-5` (stdin) | ~1–3s |
 
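[Editor's note] The truncation and stdin handoff live in `src/cli-runner.ts`. As a rough sketch (only `MAX_MESSAGES`, `MAX_MSG_CHARS`, stdin delivery, and the optional `cwd` are confirmed by this diff; the `formatPrompt` signature and flattening format are assumptions):

```ts
import { spawn, type ChildProcess } from "node:child_process";

const MAX_MESSAGES = 20;    // documented: keep only the newest 20 messages
const MAX_MSG_CHARS = 4000; // documented in the v0.2.0 changelog entry

interface ChatMessage { role: string; content: string; }

// Hypothetical shape: flatten a chat into one prompt string.
function formatPrompt(messages: ChatMessage[]): string {
  const system = messages.filter((m) => m.role === "system");
  const recent = messages.filter((m) => m.role !== "system").slice(-MAX_MESSAGES);
  return [...system, ...recent]
    .map((m) => `${m.role}: ${m.content.slice(0, MAX_MSG_CHARS)}`)
    .join("\n\n");
}

// The prompt travels over stdin, never argv, so it cannot count against ARG_MAX.
function runCli(cmd: string, args: string[], prompt: string, cwd?: string): ChildProcess {
  const child = spawn(cmd, args, { cwd, stdio: ["pipe", "pipe", "pipe"] });
  child.stdin?.end(prompt);
  return child;
}
```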
  ### Phase 3 — Slash commands
- Six plugin-registered commands for instant model switching (no agent invocation needed):
+ Ten plugin-registered commands (all `requireAuth: true`):
+
+ **Claude Code CLI** (routed via local proxy on `:31337`):
 
- | Command | Switches to |
+ | Command | Model |
  |---|---|
  | `/cli-sonnet` | `vllm/cli-claude/claude-sonnet-4-6` |
  | `/cli-opus` | `vllm/cli-claude/claude-opus-4-6` |
  | `/cli-haiku` | `vllm/cli-claude/claude-haiku-4-5` |
+
+ **Gemini CLI** (routed via local proxy on `:31337`, stdin + `cwd=/tmp`):
+
+ | Command | Model |
+ |---|---|
  | `/cli-gemini` | `vllm/cli-gemini/gemini-2.5-pro` |
  | `/cli-gemini-flash` | `vllm/cli-gemini/gemini-2.5-flash` |
  | `/cli-gemini3` | `vllm/cli-gemini/gemini-3-pro` |
 
- All commands require `requireAuth: true` — only authorized/owner senders can execute them. Each command calls `openclaw models set <model>` via `api.runtime.system.runCommandWithTimeout` and replies with a confirmation.
+ **Codex CLI** (via the `openai-codex` provider with Codex CLI OAuth auth; calls the OpenAI API directly, **not** through the local proxy):
+
+ | Command | Model |
+ |---|---|
+ | `/cli-codex` | `openai-codex/gpt-5.3-codex` |
+ | `/cli-codex-mini` | `openai-codex/gpt-5.1-codex-mini` |
+
+ **Utility:**
+
+ | Command | What it does |
+ |---|---|
+ | `/cli-back` | Restore the model active **before** the last `/cli-*` switch |
+ | `/cli-test [model]` | One-shot proxy health check — **does NOT switch your active model** |
+
+ **`/cli-back` details** (see the sketch below):
+ - Before every `/cli-*` switch, the current model is saved to `~/.openclaw/cli-bridge-state.json`
+ - `/cli-back` reads it, calls `openclaw models set <previous>`, then clears the file
+ - State survives gateway restarts — safe to use at any time
+
+ **`/cli-test` details:**
+ - Accepts a short form (`cli-sonnet`) or a full path (`vllm/cli-claude/claude-sonnet-4-6`)
+ - Default when no arg is given: `cli-claude/claude-sonnet-4-6`
+ - Reports response content and latency, and confirms your active model is unchanged
 
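[Editor's note] A minimal sketch of that persistence, assuming a one-key JSON shape (the state-file path is documented above; the helper names and file shape are not):

```ts
import { existsSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";

const STATE_FILE = join(homedir(), ".openclaw", "cli-bridge-state.json");

// Called before every /cli-* switch so /cli-back knows where to return.
function savePreviousModel(model: string): void {
  writeFileSync(STATE_FILE, JSON.stringify({ previousModel: model }), "utf8");
}

// Called by /cli-back: read, then clear. Survives gateway restarts because
// the state lives on disk, not in process memory.
function takePreviousModel(): string | null {
  if (!existsSync(STATE_FILE)) return null;
  const { previousModel } = JSON.parse(readFileSync(STATE_FILE, "utf8"));
  unlinkSync(STATE_FILE);
  return previousModel ?? null;
}
```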
  ---
 
@@ -57,7 +86,7 @@ All commands require `requireAuth: true` — only authorized/owner senders can e
  # From ClawHub
  clawhub install openclaw-cli-bridge-elvatis
 
- # Or from workspace (development / local path)
+ # Or from workspace (development)
  # Add to ~/.openclaw/openclaw.json:
  #   plugins.load.paths: ["~/.openclaw/workspace/openclaw-cli-bridge-elvatis"]
  #   plugins.entries.openclaw-cli-bridge-elvatis: { "enabled": true }
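[Editor's note] For the workspace variant, the two commented keys above assemble into something like this in `~/.openclaw/openclaw.json`; the key names are quoted from the comments, but the exact nesting is an assumption:

```json
{
  "plugins": {
    "load": {
      "paths": ["~/.openclaw/workspace/openclaw-cli-bridge-elvatis"]
    },
    "entries": {
      "openclaw-cli-bridge-elvatis": { "enabled": true }
    }
  }
}
```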
@@ -69,37 +98,61 @@ clawhub install openclaw-cli-bridge-elvatis
 
  ### 1. Enable + restart
 
- ```bash
- # In ~/.openclaw/openclaw.json → plugins.entries:
+ ```json
+ // ~/.openclaw/openclaw.json → plugins.entries
  "openclaw-cli-bridge-elvatis": { "enabled": true }
+ ```
 
+ ```bash
  openclaw gateway restart
  ```
 
- ### 2. Register Codex auth (Phase 1, optional)
+ ### 2. Verify (check gateway logs)
+
+ ```
+ [cli-bridge] proxy ready on :31337
+ [cli-bridge] registered 8 commands: /cli-sonnet, /cli-opus, /cli-haiku,
+ /cli-gemini, /cli-gemini-flash, /cli-gemini3, /cli-back, /cli-test
+ ```
+
+ ### 3. Register Codex auth (optional — Phase 1 only)
 
  ```bash
  openclaw models auth login --provider openai-codex
  # Select: "Codex CLI (existing login)"
  ```
 
- ### 3. Verify proxy (Phase 2)
-
- On startup the plugin auto-patches `openclaw.json` with the `vllm` provider config (port `31337`) and logs:
+ ### 4. Test without switching your model
 
  ```
- [cli-bridge] proxy ready — vllm/cli-gemini/* and vllm/cli-claude/* available
- [cli-bridge] registered 6 slash commands: /cli-sonnet, /cli-opus, /cli-haiku, /cli-gemini, /cli-gemini-flash, /cli-gemini3
+ /cli-test
+ → 🧪 CLI Bridge Test
+   Model: vllm/cli-claude/claude-sonnet-4-6
+   Response: CLI bridge OK
+   Latency: 2531ms
+   Active model unchanged: anthropic/claude-sonnet-4-6
+
+ /cli-test cli-gemini
+ → 🧪 CLI Bridge Test
+   Model: vllm/cli-gemini/gemini-2.5-pro
+   Response: CLI bridge OK
+   Latency: 8586ms
+   Active model unchanged: anthropic/claude-sonnet-4-6
  ```
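[Editor's note] Under the hood, `/cli-test` amounts to one request against the local proxy (the diff later shows a `proxyTestRequest` helper). A sketch of the equivalent call; the `/v1/chat/completions` path is the conventional OpenAI-compatible route and is assumed here, since the README only confirms an HTTP POST to `localhost:31337`:

```ts
// Hypothetical helper; mirrors what /cli-test reports (response text + latency).
async function probeProxy(model = "cli-claude/claude-sonnet-4-6") {
  const t0 = Date.now();
  const res = await fetch("http://127.0.0.1:31337/v1/chat/completions", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({
      model,
      messages: [{ role: "user", content: "Reply with exactly: CLI bridge OK" }],
    }),
  });
  const data = await res.json();
  return { text: data.choices?.[0]?.message?.content, latencyMs: Date.now() - t0 };
}
```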
 
 
- ### 4. Switch models (Phase 3)
-
- Use any `/cli-*` command from any connected channel:
+ ### 5. Switch and restore
 
  ```
  /cli-sonnet
  → ✅ Switched to Claude Sonnet 4.6 (CLI)
    `vllm/cli-claude/claude-sonnet-4-6`
+   Use /cli-back to restore previous model.
+
+ ... test things ...
+
+ /cli-back
+ → ✅ Restored previous model
+   `anthropic/claude-sonnet-4-6`
  ```
 
  ---
@@ -125,17 +178,33 @@ In `~/.openclaw/openclaw.json` → `plugins.entries.openclaw-cli-bridge-elvatis.
  ```
  OpenClaw agent
 
- ├─ openai-codex/* ──► OpenAI API (auth via ~/.codex/auth.json OAuth tokens)
+ ├─ openai-codex/* ──────────────────────────► OpenAI API (direct)
+ │    auth: ~/.codex/auth.json OAuth tokens        ▲
+ │                                                 │
+ │  /cli-codex, /cli-codex-mini ───────────────────┘  (switch to this provider)
 
  └─ vllm/cli-gemini/* ─┐
     vllm/cli-claude/* ─┤─► localhost:31337 (openclaw-cli-bridge proxy)
-                       │    ├─ cli-gemini/* → gemini -m <model> @<tmpfile>
-                       │    └─ cli-claude/* → claude -p --model <model>  ← prompt via stdin
+                       │    ├─ cli-gemini/* → gemini -m <model> -p ""
+                       │    │      stdin=prompt, cwd=/tmp
+                       │    │      (neutral cwd prevents agentic mode)
+                       │    └─ cli-claude/* → claude -p --model <model>
+                       │           stdin=prompt
                        └───────────────────────────────────────────────────
 
- Slash commands (bypass agent):
-   /cli-sonnet|opus|haiku|gemini|gemini-flash|gemini3
-   └─► openclaw models set <model>   (atomic, ~1s)
+ Slash commands (bypass agent, requireAuth=true):
+   /cli-sonnet|opus|haiku|gemini|gemini-flash|gemini3|codex|codex-mini
+     └─► saves current model → ~/.openclaw/cli-bridge-state.json
+     └─► openclaw models set <model>   (~1s, atomic)
+
+ /cli-back
+   └─► reads ~/.openclaw/cli-bridge-state.json
+   └─► openclaw models set <previous>
+
+ /cli-test [model]
+   └─► HTTP POST → localhost:31337   (no global model change)
+   └─► reports response + latency
+   └─► NOTE: only tests the proxy — Codex models bypass the proxy
  ```
 
  ---
@@ -143,12 +212,14 @@ Slash commands (bypass agent):
  ## Known Issues & Fixes
 
  ### `spawn E2BIG` (fixed in v0.2.1)
+ **Symptom:** `CLI error for cli-claude/…: spawn E2BIG` after ~500+ messages.
+ **Cause:** The gateway injects large values into `process.env` at runtime; spreading it into `spawn()` exceeds Linux's `ARG_MAX` (~2MB).
+ **Fix:** `buildMinimalEnv()` — only passes `HOME`, `PATH`, `USER`, and auth keys.
 
- **Symptom:** `CLI error for cli-claude/…: spawn E2BIG` after ~500+ messages in a session.
-
- **Root cause:** The OpenClaw gateway modifies `process.env` at runtime (OPENCLAW_* vars, session context, etc.). Spreading the full `process.env` into `spawn()` pushes `argv + envp` over Linux's `ARG_MAX` (~2MB).
-
- **Fix:** `buildMinimalEnv()` in `src/cli-runner.ts` — only passes `HOME`, `PATH`, `USER`, and auth keys to the subprocess. Immune to gateway runtime env size.
+ ### Gemini agentic mode / hangs (fixed in v0.2.4)
+ **Symptom:** Gemini hangs, returns wrong answers, or says "directory does not exist".
+ **Cause:** `@file` syntax (`gemini -p @/tmp/xxx.txt`) triggers agentic mode: Gemini scans the working directory for project context and treats prompts as task instructions. Running from the workspace root makes this worse.
+ **Fix:** Stdin delivery (`gemini -p ""` with the prompt via stdin) + `cwd=/tmp`. Same pattern as Claude (see the sketch below).
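[Editor's note] Both fixes, sketched in code: `buildMinimalEnv()` is named above, but the exact allow-list of auth variables is an assumption:

```ts
import { spawn } from "node:child_process";

// v0.2.1 fix: a tiny envp instead of spreading the gateway-inflated process.env.
function buildMinimalEnv(): NodeJS.ProcessEnv {
  const keep = ["HOME", "PATH", "USER"]; // plus whatever auth vars the CLIs read
  const env: NodeJS.ProcessEnv = {};
  for (const key of keep) {
    if (process.env[key] !== undefined) env[key] = process.env[key];
  }
  return env;
}

// v0.2.4 fix: prompt over stdin with a neutral cwd. No @file, no argv prompt.
const child = spawn("gemini", ["-m", "gemini-2.5-pro", "-p", ""], {
  cwd: "/tmp",            // neutral cwd keeps Gemini out of agentic mode
  env: buildMinimalEnv(), // immune to gateway runtime env bloat
  stdio: ["pipe", "pipe", "pipe"],
});
child.stdin?.end("your prompt here");
```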
 
  ---
 
@@ -156,30 +227,39 @@ Slash commands (bypass agent):
 
  ```bash
  npm run typecheck   # tsc --noEmit
- npm test            # vitest run
+ npm test            # vitest run (5 unit tests for formatPrompt)
  ```
 
- Test coverage: `test/cli-runner.test.ts` — unit tests for `formatPrompt` (truncation, system message handling, MAX_MSG_CHARS).
-
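[Editor's note] Assuming the `formatPrompt` shape sketched earlier (and that `src/cli-runner.ts` exports it), one of those truncation tests could look roughly like this; the actual assertions in `test/cli-runner.test.ts` are not shown in the diff:

```ts
import { describe, expect, it } from "vitest";
import { formatPrompt } from "../src/cli-runner";

describe("formatPrompt", () => {
  it("keeps the system message plus only the newest 20 messages", () => {
    const messages = [
      { role: "system", content: "be terse" },
      ...Array.from({ length: 30 }, (_, i) => ({ role: "user", content: `msg ${i}` })),
    ];
    const prompt = formatPrompt(messages);
    expect(prompt).toContain("be terse"); // system message always survives
    expect(prompt).not.toContain("msg 9"); // the 21st-newest message is dropped
    expect(prompt).toContain("msg 29");    // the newest message is kept
  });
});
```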
  ---
 
  ## Changelog
 
+ ### v0.2.5
+ - **feat:** `/cli-codex` → `openai-codex/gpt-5.3-codex`
+ - **feat:** `/cli-codex-mini` → `openai-codex/gpt-5.1-codex-mini`
+ - Codex commands use the `openai-codex` provider (Codex CLI OAuth auth, direct OpenAI API — not the local proxy)
+
+ ### v0.2.4
+ - **fix:** Gemini agentic mode — replaced `@file` with stdin delivery (`-p ""`) + `cwd=/tmp`
+ - **fix:** Filter `[WARN]` and `Loaded cached credentials` noise from Gemini stderr
+ - Added `RunCliOptions` interface with optional `cwd` field
+
+ ### v0.2.3
+ - **feat:** `/cli-back` — restore previous model (state persisted in `~/.openclaw/cli-bridge-state.json`)
+ - **feat:** `/cli-test [model]` — one-shot proxy health check without changing active model
+
  ### v0.2.2
  - **feat:** Phase 3 — `/cli-*` slash commands for instant model switching
- - All 6 commands registered via `api.registerCommand` with `requireAuth: true`
- - Calls `openclaw models set <model>` via `api.runtime.system.runCommandWithTimeout`
+ - All 6 model commands via `api.registerCommand` with `requireAuth: true`
 
  ### v0.2.1
- - **fix:** `spawn E2BIG` — use `buildMinimalEnv()` instead of spreading full `process.env`
- - **feat:** Added `test/cli-runner.test.ts` (5 unit tests)
- - Added Gemini 3 Pro model (`vllm/cli-gemini/gemini-3-pro`)
+ - **fix:** `spawn E2BIG` — `buildMinimalEnv()` instead of spreading full `process.env`
+ - **feat:** Unit tests (`test/cli-runner.test.ts`)
 
  ### v0.2.0
  - **feat:** Phase 2 — local OpenAI-compatible proxy server
- - Prompt via stdin/tmpfile (never as CLI arg) to prevent arg-size issues
- - `MAX_MESSAGES=20` + `MAX_MSG_CHARS=4000` truncation in `formatPrompt`
- - Auto-patch of `openclaw.json` vllm provider config on first start
+ - Stdin prompt delivery, `MAX_MESSAGES=20` + `MAX_MSG_CHARS=4000` truncation
+ - Auto-patch of `openclaw.json` vllm provider config
 
  ### v0.1.x
  - Phase 1: Codex CLI OAuth auth bridge
package/index.ts CHANGED
@@ -9,12 +9,14 @@
  * are handled by the Gemini CLI and Claude Code CLI subprocesses.
  *
  * Phase 3 (slash commands): registers /cli-* commands for instant model switching.
- *   /cli-sonnet       → vllm/cli-claude/claude-sonnet-4-6
- *   /cli-opus         → vllm/cli-claude/claude-opus-4-6
- *   /cli-haiku        → vllm/cli-claude/claude-haiku-4-5
- *   /cli-gemini       → vllm/cli-gemini/gemini-2.5-pro
- *   /cli-gemini-flash → vllm/cli-gemini/gemini-2.5-flash
- *   /cli-gemini3      → vllm/cli-gemini/gemini-3-pro
+ *   /cli-sonnet       → vllm/cli-claude/claude-sonnet-4-6   (Claude Code CLI proxy)
+ *   /cli-opus         → vllm/cli-claude/claude-opus-4-6     (Claude Code CLI proxy)
+ *   /cli-haiku        → vllm/cli-claude/claude-haiku-4-5    (Claude Code CLI proxy)
+ *   /cli-gemini       → vllm/cli-gemini/gemini-2.5-pro      (Gemini CLI proxy)
+ *   /cli-gemini-flash → vllm/cli-gemini/gemini-2.5-flash    (Gemini CLI proxy)
+ *   /cli-gemini3      → vllm/cli-gemini/gemini-3-pro        (Gemini CLI proxy)
+ *   /cli-codex        → openai-codex/gpt-5.3-codex          (Codex CLI OAuth, direct API)
+ *   /cli-codex-mini   → openai-codex/gpt-5.1-codex-mini     (Codex CLI OAuth, direct API)
  *   /cli-back         → restore model that was active before last /cli-* switch
  *   /cli-test [model] → one-shot proxy health check (does NOT switch global model)
  *
@@ -116,42 +118,57 @@ function readCurrentModel(): string | null {
  // Phase 3: model command table
  // ──────────────────────────────────────────────────────────────────────────────
  const CLI_MODEL_COMMANDS = [
+   // ── Claude (via local proxy → Claude Code CLI) ──────────────────────────────
    {
      name: "cli-sonnet",
      model: "vllm/cli-claude/claude-sonnet-4-6",
-     description: "Switch to Claude Sonnet 4.6 (CLI bridge)",
+     description: "Switch to Claude Sonnet 4.6 (Claude Code CLI via local proxy)",
      label: "Claude Sonnet 4.6 (CLI)",
    },
    {
      name: "cli-opus",
      model: "vllm/cli-claude/claude-opus-4-6",
-     description: "Switch to Claude Opus 4.6 (CLI bridge)",
+     description: "Switch to Claude Opus 4.6 (Claude Code CLI via local proxy)",
      label: "Claude Opus 4.6 (CLI)",
    },
    {
      name: "cli-haiku",
      model: "vllm/cli-claude/claude-haiku-4-5",
-     description: "Switch to Claude Haiku 4.5 (CLI bridge)",
+     description: "Switch to Claude Haiku 4.5 (Claude Code CLI via local proxy)",
      label: "Claude Haiku 4.5 (CLI)",
    },
+   // ── Gemini (via local proxy → Gemini CLI) ───────────────────────────────────
    {
      name: "cli-gemini",
      model: "vllm/cli-gemini/gemini-2.5-pro",
-     description: "Switch to Gemini 2.5 Pro (CLI bridge)",
+     description: "Switch to Gemini 2.5 Pro (Gemini CLI via local proxy)",
      label: "Gemini 2.5 Pro (CLI)",
    },
    {
      name: "cli-gemini-flash",
      model: "vllm/cli-gemini/gemini-2.5-flash",
-     description: "Switch to Gemini 2.5 Flash (CLI bridge)",
+     description: "Switch to Gemini 2.5 Flash (Gemini CLI via local proxy)",
      label: "Gemini 2.5 Flash (CLI)",
    },
    {
      name: "cli-gemini3",
      model: "vllm/cli-gemini/gemini-3-pro",
-     description: "Switch to Gemini 3 Pro (CLI bridge)",
+     description: "Switch to Gemini 3 Pro (Gemini CLI via local proxy)",
      label: "Gemini 3 Pro (CLI)",
    },
+   // ── Codex (via openai-codex provider — Codex CLI OAuth auth, direct API) ────
+   {
+     name: "cli-codex",
+     model: "openai-codex/gpt-5.3-codex",
+     description: "Switch to GPT-5.3 Codex (openai-codex provider, Codex CLI auth)",
+     label: "GPT-5.3 Codex",
+   },
+   {
+     name: "cli-codex-mini",
+     model: "openai-codex/gpt-5.1-codex-mini",
+     description: "Switch to GPT-5.1 Codex Mini (openai-codex provider, Codex CLI auth)",
+     label: "GPT-5.1 Codex Mini",
+   },
  ] as const;
 
  /** Default model used by /cli-test when no arg is given */
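[Editor's note] For orientation, the loop that consumes this table (its first lines appear in the Phase 3a hunk below) plausibly looks like the following. `api.registerCommand`, `requireAuth`, `readCurrentModel`, `openclaw models set`, and `api.runtime.system.runCommandWithTimeout` are all named in the diff; the handler signature, the state helper, and the timeout value are assumptions:

```ts
// Hypothetical reconstruction of the Phase 3a registration loop.
for (const entry of CLI_MODEL_COMMANDS) {
  const { name, model, description, label } = entry;
  api.registerCommand({
    name,
    description,
    requireAuth: true, // only authorized/owner senders may switch models
    handler: async () => {
      const current = readCurrentModel();
      if (current) savePreviousModel(current); // lets /cli-back undo the switch
      await api.runtime.system.runCommandWithTimeout(
        `openclaw models set ${model}`,
        15_000 // assumed timeout
      );
      return `✅ Switched to ${label}\n\`${model}\`\nUse /cli-back to restore previous model.`;
    },
  });
}
```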
@@ -260,7 +277,7 @@ function proxyTestRequest(
  const plugin = {
    id: "openclaw-cli-bridge-elvatis",
    name: "OpenClaw CLI Bridge",
-   version: "0.2.3",
+   version: "0.2.5",
    description:
      "Phase 1: openai-codex auth bridge. " +
      "Phase 2: HTTP proxy for gemini/claude CLIs. " +
@@ -334,6 +351,8 @@ const plugin = {
    }
 
    // ── Phase 2: CLI request proxy ─────────────────────────────────────────────
+   let proxyServer: import("node:http").Server | null = null;
+
    if (enableProxy) {
      startProxyServer({
        port,
@@ -342,7 +361,8 @@ const plugin = {
        log: (msg) => api.logger.info(msg),
        warn: (msg) => api.logger.warn(msg),
      })
-       .then(() => {
+       .then((server) => {
+         proxyServer = server;
          api.logger.info(
            `[cli-bridge] proxy ready on :${port} — vllm/cli-gemini/* and vllm/cli-claude/* available`
          );
@@ -358,6 +378,26 @@ const plugin = {
      });
    }
 
+   // ── Cleanup: close proxy server on plugin stop (hot-reload / gateway restart) ──
+   // Register a named service so OpenClaw can call stop() on plugin teardown.
+   api.registerService({
+     id: "cli-bridge-proxy",
+     start: async () => { /* proxy already started above */ },
+     stop: async () => {
+       if (proxyServer) {
+         await new Promise<void>((resolve) => {
+           proxyServer!.close((err) => {
+             if (err) api.logger.warn(`[cli-bridge] proxy close error: ${err.message}`);
+             else api.logger.info(`[cli-bridge] proxy server closed on plugin stop`);
+             resolve();
+           });
+         });
+         proxyServer = null;
+       }
+     },
+   });
+
+
    // ── Phase 3a: /cli-* model switch commands ─────────────────────────────────
    for (const entry of CLI_MODEL_COMMANDS) {
      const { name, model, description, label } = entry;
@@ -1,7 +1,7 @@
  {
    "id": "openclaw-cli-bridge-elvatis",
    "name": "OpenClaw CLI Bridge",
-   "version": "0.2.4",
+   "version": "0.2.6",
    "description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
    "providers": [
      "openai-codex"
package/package.json CHANGED
@@ -1,8 +1,13 @@
  {
    "name": "@elvatis_com/openclaw-cli-bridge-elvatis",
-   "version": "0.2.4",
+   "version": "0.2.6",
    "description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
    "type": "module",
+   "openclaw": {
+     "extensions": [
+       "./index.ts"
+     ]
+   },
    "scripts": {
      "build": "tsc",
      "typecheck": "tsc -p tsconfig.check.json",