@elvatis_com/openclaw-cli-bridge-elvatis 0.2.17 → 0.2.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.ai/handoff/STATUS.md +3 -3
- package/README.md +7 -2
- package/SKILL.md +1 -1
- package/index.ts +28 -3
- package/openclaw.plugin.json +1 -1
- package/package.json +1 -1
- package/src/cli-runner.ts +1 -0
package/.ai/handoff/STATUS.md
CHANGED
|
@@ -2,13 +2,13 @@
|
|
|
2
2
|
|
|
3
3
|
_Last updated: 2026-03-08 by Akido (claude-sonnet-4-6)_
|
|
4
4
|
|
|
5
|
-
## Current Version: 0.2.
|
|
5
|
+
## Current Version: 0.2.18 — STABLE
|
|
6
6
|
|
|
7
7
|
## What is done
|
|
8
8
|
|
|
9
9
|
- ✅ Repo: `https://github.com/elvatis/openclaw-cli-bridge-elvatis`
|
|
10
|
-
- ✅ npm:
|
|
11
|
-
- ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.
|
|
10
|
+
- ✅ npm: `@elvatis_com/openclaw-cli-bridge-elvatis@0.2.18`
|
|
11
|
+
- ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.18`
|
|
12
12
|
- ✅ Phase 1: `openai-codex` provider via `~/.codex/auth.json` (no re-login)
|
|
13
13
|
- ✅ Phase 2: Local OpenAI-compatible proxy on `127.0.0.1:31337` (Gemini + Claude CLI)
|
|
14
14
|
- ✅ Phase 3: 14 slash commands (`/cli-sonnet`, `/cli-opus`, `/cli-haiku`, `/cli-gemini`, `/cli-gemini-flash`, `/cli-gemini3`, `/cli-gemini3-flash`, `/cli-codex`, `/cli-codex-spark`, `/cli-codex52`, `/cli-codex54`, `/cli-codex-mini`, `/cli-back`, `/cli-test`)
|
package/README.md
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
> OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, and health testing.
|
|
4
4
|
|
|
5
|
-
**Current version:** `0.2.
|
|
5
|
+
**Current version:** `0.2.18`
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
@@ -234,6 +234,11 @@ npm test # vitest run (5 unit tests for formatPrompt)
|
|
|
234
234
|
|
|
235
235
|
## Changelog
|
|
236
236
|
|
|
237
|
+
### v0.2.18
|
|
238
|
+
- **feat:** Add `/cli-gemini3-flash` → `gemini-3-flash-preview`
|
|
239
|
+
- **feat:** Add `/cli-codex-spark` → `gpt-5.3-codex-spark`, `/cli-codex52` → `gpt-5.2-codex`, `/cli-codex54` → `gpt-5.4`
|
|
240
|
+
- **fix:** Update `DEFAULT_ALLOWED_CLI_MODELS` with `gemini-3-flash-preview`
|
|
241
|
+
|
|
237
242
|
### v0.2.17
|
|
238
243
|
- **fix:** `/cli-gemini3` model corrected to `gemini-3-pro-preview` (was `gemini-3-pro`, which returns 404 from Gemini API)
|
|
239
244
|
|
|
@@ -257,7 +262,7 @@ npm test # vitest run (5 unit tests for formatPrompt)
|
|
|
257
262
|
- **docs:** Fix changelog continuity — v0.2.10 entry was lost, v0.2.11 description was wrong; all entries now accurate
|
|
258
263
|
|
|
259
264
|
### v0.2.11
|
|
260
|
-
- **docs:** Fix README `Current version:** `0.2.
|
|
265
|
+
- **docs:** Fix README `Current version:** `0.2.11`
|
|
261
266
|
|
|
262
267
|
### v0.2.10
|
|
263
268
|
- **docs:** Fix version labels — SKILL.md was showing 0.2.2, README changelog ended at v0.2.5; add entries for v0.2.6–v0.2.9
|
package/SKILL.md
CHANGED
package/index.ts
CHANGED
|
@@ -14,7 +14,8 @@
|
|
|
14
14
|
* /cli-haiku → vllm/cli-claude/claude-haiku-4-5 (Claude Code CLI proxy)
|
|
15
15
|
* /cli-gemini → vllm/cli-gemini/gemini-2.5-pro (Gemini CLI proxy)
|
|
16
16
|
* /cli-gemini-flash → vllm/cli-gemini/gemini-2.5-flash (Gemini CLI proxy)
|
|
17
|
-
* /cli-gemini3 → vllm/cli-gemini/gemini-3-pro-preview
|
|
17
|
+
* /cli-gemini3 → vllm/cli-gemini/gemini-3-pro-preview (Gemini CLI proxy)
|
|
18
|
+
* /cli-gemini3-flash → vllm/cli-gemini/gemini-3-flash-preview (Gemini CLI proxy)
|
|
18
19
|
* /cli-codex → openai-codex/gpt-5.3-codex (Codex CLI OAuth, direct API)
|
|
19
20
|
* /cli-codex-mini → openai-codex/gpt-5.1-codex-mini (Codex CLI OAuth, direct API)
|
|
20
21
|
* /cli-back → restore model that was active before last /cli-* switch
|
|
@@ -153,8 +154,14 @@ const CLI_MODEL_COMMANDS = [
|
|
|
153
154
|
{
|
|
154
155
|
name: "cli-gemini3",
|
|
155
156
|
model: "vllm/cli-gemini/gemini-3-pro-preview",
|
|
156
|
-
description: "Switch to Gemini 3 Pro (Gemini CLI via local proxy)",
|
|
157
|
-
label: "Gemini 3 Pro (CLI)",
|
|
157
|
+
description: "Switch to Gemini 3 Pro Preview (Gemini CLI via local proxy)",
|
|
158
|
+
label: "Gemini 3 Pro Preview (CLI)",
|
|
159
|
+
},
|
|
160
|
+
{
|
|
161
|
+
name: "cli-gemini3-flash",
|
|
162
|
+
model: "vllm/cli-gemini/gemini-3-flash-preview",
|
|
163
|
+
description: "Switch to Gemini 3 Flash Preview (Gemini CLI via local proxy)",
|
|
164
|
+
label: "Gemini 3 Flash Preview (CLI)",
|
|
158
165
|
},
|
|
159
166
|
// ── Codex (via openai-codex provider — Codex CLI OAuth auth, direct API) ────
|
|
160
167
|
{
|
|
@@ -163,6 +170,24 @@ const CLI_MODEL_COMMANDS = [
|
|
|
163
170
|
description: "Switch to GPT-5.3 Codex (openai-codex provider, Codex CLI auth)",
|
|
164
171
|
label: "GPT-5.3 Codex",
|
|
165
172
|
},
|
|
173
|
+
{
|
|
174
|
+
name: "cli-codex-spark",
|
|
175
|
+
model: "openai-codex/gpt-5.3-codex-spark",
|
|
176
|
+
description: "Switch to GPT-5.3 Codex Spark (openai-codex provider, Codex CLI auth)",
|
|
177
|
+
label: "GPT-5.3 Codex Spark",
|
|
178
|
+
},
|
|
179
|
+
{
|
|
180
|
+
name: "cli-codex52",
|
|
181
|
+
model: "openai-codex/gpt-5.2-codex",
|
|
182
|
+
description: "Switch to GPT-5.2 Codex (openai-codex provider, Codex CLI auth)",
|
|
183
|
+
label: "GPT-5.2 Codex",
|
|
184
|
+
},
|
|
185
|
+
{
|
|
186
|
+
name: "cli-codex54",
|
|
187
|
+
model: "openai-codex/gpt-5.4",
|
|
188
|
+
description: "Switch to GPT-5.4 (openai-codex provider, Codex CLI auth — may require upgraded scope)",
|
|
189
|
+
label: "GPT-5.4 (Codex)",
|
|
190
|
+
},
|
|
166
191
|
{
|
|
167
192
|
name: "cli-codex-mini",
|
|
168
193
|
model: "openai-codex/gpt-5.1-codex-mini",
|
package/openclaw.plugin.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"id": "openclaw-cli-bridge-elvatis",
|
|
3
3
|
"name": "OpenClaw CLI Bridge",
|
|
4
|
-
"version": "0.2.
|
|
4
|
+
"version": "0.2.18",
|
|
5
5
|
"description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
|
|
6
6
|
"providers": [
|
|
7
7
|
"openai-codex"
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@elvatis_com/openclaw-cli-bridge-elvatis",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.18",
|
|
4
4
|
"description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"openclaw": {
|
package/src/cli-runner.ts
CHANGED
|
@@ -259,6 +259,7 @@ export const DEFAULT_ALLOWED_CLI_MODELS: ReadonlySet<string> = new Set([
|
|
|
259
259
|
"cli-gemini/gemini-2.5-pro",
|
|
260
260
|
"cli-gemini/gemini-2.5-flash",
|
|
261
261
|
"cli-gemini/gemini-3-pro-preview",
|
|
262
|
+
"cli-gemini/gemini-3-flash-preview",
|
|
262
263
|
]);
|
|
263
264
|
|
|
264
265
|
// ──────────────────────────────────────────────────────────────────────────────
|