@elvatis_com/openclaw-cli-bridge-elvatis 0.2.16 → 0.2.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,13 +2,13 @@
2
2
 
3
3
  _Last updated: 2026-03-08 by Akido (claude-sonnet-4-6)_
4
4
 
5
- ## Current Version: 0.2.16 — STABLE
5
+ ## Current Version: 0.2.17 — STABLE
6
6
 
7
7
  ## What is done
8
8
 
9
9
  - ✅ Repo: `https://github.com/elvatis/openclaw-cli-bridge-elvatis`
10
- - ✅ npm: `@elvatis_com/openclaw-cli-bridge-elvatis@0.2.16`
11
- - ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.16`
10
+ - ✅ npm: `@elvatis_com/openclaw-cli-bridge-elvatis@0.2.17`
11
+ - ✅ ClawHub: `openclaw-cli-bridge-elvatis@0.2.17`
12
12
  - ✅ Phase 1: `openai-codex` provider via `~/.codex/auth.json` (no re-login)
13
13
  - ✅ Phase 2: Local OpenAI-compatible proxy on `127.0.0.1:31337` (Gemini + Claude CLI)
14
14
  - ✅ Phase 3: 10 slash commands (`/cli-sonnet`, `/cli-opus`, `/cli-haiku`, `/cli-gemini`, `/cli-gemini-flash`, `/cli-gemini3`, `/cli-codex`, `/cli-codex-mini`, `/cli-back`, `/cli-test`)
package/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, and health testing.
4
4
 
5
- **Current version:** `0.2.16`
5
+ **Current version:** `0.2.17`
6
6
 
7
7
  ---
8
8
 
@@ -234,6 +234,9 @@ npm test # vitest run (5 unit tests for formatPrompt)
234
234
 
235
235
  ## Changelog
236
236
 
237
+ ### v0.2.17
238
+ - **fix:** `/cli-gemini3` model corrected to `gemini-3-pro-preview` (was `gemini-3-pro`, which returns 404 from Gemini API)
239
+
237
240
  ### v0.2.16
238
241
  - **feat(T-101):** Expand test suite to 45 tests — new cases for `formatPrompt` (mixed roles, boundary values, system messages) and `routeToCliRunner` (gemini paths, edge cases)
239
242
  - **feat(T-103):** Add `DEFAULT_ALLOWED_CLI_MODELS` allowlist; `routeToCliRunner` now rejects unregistered models by default; pass `allowedModels: null` to opt out
@@ -254,7 +257,7 @@ npm test # vitest run (5 unit tests for formatPrompt)
254
257
  - **docs:** Fix changelog continuity — v0.2.10 entry was lost, v0.2.11 description was wrong; all entries now accurate
255
258
 
256
259
  ### v0.2.11
257
- - **docs:** Fix README `Current version` header (was stuck at 0.2.9 after 0.2.10 bump)
260
+ - **docs:** Fix README `Current version` header (was stuck at 0.2.9 after 0.2.10 bump)
258
261
 
259
262
  ### v0.2.10
260
263
  - **docs:** Fix version labels — SKILL.md was showing 0.2.2, README changelog ended at v0.2.5; add entries for v0.2.6–v0.2.9
package/SKILL.md CHANGED
@@ -53,4 +53,4 @@ Each command runs `openclaw models set <model>` atomically and replies with a co
53
53
 
54
54
  See `README.md` for full configuration reference and architecture diagram.
55
55
 
56
- **Version:** 0.2.16
56
+ **Version:** 0.2.17
package/index.ts CHANGED
@@ -14,7 +14,7 @@
14
14
  * /cli-haiku → vllm/cli-claude/claude-haiku-4-5 (Claude Code CLI proxy)
15
15
  * /cli-gemini → vllm/cli-gemini/gemini-2.5-pro (Gemini CLI proxy)
16
16
  * /cli-gemini-flash → vllm/cli-gemini/gemini-2.5-flash (Gemini CLI proxy)
17
- * /cli-gemini3 → vllm/cli-gemini/gemini-3-pro (Gemini CLI proxy)
17
+ * /cli-gemini3 → vllm/cli-gemini/gemini-3-pro-preview (Gemini CLI proxy)
18
18
  * /cli-codex → openai-codex/gpt-5.3-codex (Codex CLI OAuth, direct API)
19
19
  * /cli-codex-mini → openai-codex/gpt-5.1-codex-mini (Codex CLI OAuth, direct API)
20
20
  * /cli-back → restore model that was active before last /cli-* switch
@@ -152,7 +152,7 @@ const CLI_MODEL_COMMANDS = [
152
152
  },
153
153
  {
154
154
  name: "cli-gemini3",
155
- model: "vllm/cli-gemini/gemini-3-pro",
155
+ model: "vllm/cli-gemini/gemini-3-pro-preview",
156
156
  description: "Switch to Gemini 3 Pro (Gemini CLI via local proxy)",
157
157
  label: "Gemini 3 Pro (CLI)",
158
158
  },
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "id": "openclaw-cli-bridge-elvatis",
3
3
  "name": "OpenClaw CLI Bridge",
4
- "version": "0.2.16",
4
+ "version": "0.2.17",
5
5
  "description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
6
6
  "providers": [
7
7
  "openai-codex"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elvatis_com/openclaw-cli-bridge-elvatis",
3
- "version": "0.2.16",
3
+ "version": "0.2.17",
4
4
  "description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
5
5
  "type": "module",
6
6
  "openclaw": {
package/src/cli-runner.ts CHANGED
@@ -258,7 +258,7 @@ export const DEFAULT_ALLOWED_CLI_MODELS: ReadonlySet<string> = new Set([
258
258
  // Gemini CLI
259
259
  "cli-gemini/gemini-2.5-pro",
260
260
  "cli-gemini/gemini-2.5-flash",
261
- "cli-gemini/gemini-3-pro",
261
+ "cli-gemini/gemini-3-pro-preview",
262
262
  ]);
263
263
 
264
264
  // ──────────────────────────────────────────────────────────────────────────────
@@ -180,7 +180,7 @@ describe("routeToCliRunner — model allowlist (T-103)", () => {
180
180
  it("DEFAULT_ALLOWED_CLI_MODELS includes all registered gemini models", () => {
181
181
  expect(DEFAULT_ALLOWED_CLI_MODELS.has("cli-gemini/gemini-2.5-pro")).toBe(true);
182
182
  expect(DEFAULT_ALLOWED_CLI_MODELS.has("cli-gemini/gemini-2.5-flash")).toBe(true);
183
- expect(DEFAULT_ALLOWED_CLI_MODELS.has("cli-gemini/gemini-3-pro")).toBe(true);
183
+ expect(DEFAULT_ALLOWED_CLI_MODELS.has("cli-gemini/gemini-3-pro-preview")).toBe(true);
184
184
  });
185
185
 
186
186
  it("rejects a model not in the default allowlist", async () => {