@elvatis_com/openclaw-cli-bridge-elvatis 2.6.1 → 2.6.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  > OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code, OpenCode, Pi) as model providers — with slash commands for instant model switching, restore, health testing, and model listing.
4
4
 
5
- **Current version:** `2.6.1`
5
+ **Current version:** `2.6.3`
6
6
 
7
7
  ---
8
8
 
@@ -406,6 +406,14 @@ npm run ci # lint + typecheck + test
406
406
 
407
407
  ## Changelog
408
408
 
409
+ ### v2.6.3
410
+ - **security:** Bump `vite` 8.0.2 → 8.0.5 — fixes 3 CVEs: `server.fs.deny` bypass via query strings, arbitrary file read via WebSocket, path traversal in optimized deps `.map` handling (merged Dependabot PR #18)
411
+
412
+ ### v2.6.2
413
+ - **fix:** Codex CLI `--quiet` flag removed in latest Codex version — replaced with `codex exec` subcommand for non-interactive execution. All `openai-codex/*` models were failing with "unexpected argument '--quiet'" error.
414
+ - **fix:** Agent model routing — 10 agents referenced non-existent `google-gemini-cli` provider. Remapped to `vllm/cli-gemini/*` (OAuth-based, stable) for reliable Gemini access.
415
+ - **fix:** Main agent fallback `openai-codex/gpt-5.1` routed via direct OpenAI API (broken OAuth scopes) — now routes through CLI bridge (`vllm/openai-codex/gpt-5.1`).
416
+
409
417
  ### v2.6.1
410
418
  - **fix:** Root cause of Exit 143 / 408 timeouts identified — OpenClaw's `agents.defaults.llm.idleTimeoutSeconds` defaults to 60s, which is too short for CLI subprocesses that need time before producing the first token
411
419
  - **feat:** Startup warning when `idleTimeoutSeconds` is not set or < 120s — tells you exactly what to add to `openclaw.json`
package/SKILL.md CHANGED
@@ -68,4 +68,4 @@ On gateway restart, if any session has expired, a **WhatsApp alert** is sent aut
68
68
 
69
69
  See `README.md` for full configuration reference and architecture diagram.
70
70
 
71
- **Version:** 2.6.1
71
+ **Version:** 2.6.3
@@ -2,7 +2,7 @@
2
2
  "id": "openclaw-cli-bridge-elvatis",
3
3
  "slug": "openclaw-cli-bridge-elvatis",
4
4
  "name": "OpenClaw CLI Bridge",
5
- "version": "2.6.1",
5
+ "version": "2.6.3",
6
6
  "license": "MIT",
7
7
  "description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
8
8
  "providers": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elvatis_com/openclaw-cli-bridge-elvatis",
3
- "version": "2.6.1",
3
+ "version": "2.6.3",
4
4
  "description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
5
5
  "type": "module",
6
6
  "openclaw": {
package/src/cli-runner.ts CHANGED
@@ -592,7 +592,7 @@ export async function runCodex(
592
592
  opts?: { tools?: ToolDefinition[]; mediaFiles?: MediaFile[]; log?: (msg: string) => void }
593
593
  ): Promise<string> {
594
594
  const model = stripPrefix(modelId);
595
- const args = ["--model", model, "--quiet", "--full-auto"];
595
+ const args = ["exec", "--model", model, "--full-auto"];
596
596
 
597
597
  // Codex supports native image input via -i flag
598
598
  if (opts?.mediaFiles?.length) {
@@ -332,7 +332,7 @@ export class SessionManager {
332
332
  }
333
333
  return {
334
334
  cmd: "codex",
335
- args: ["--model", modelName, "--quiet", "--full-auto"],
335
+ args: ["exec", "--model", modelName, "--full-auto"],
336
336
  cwd,
337
337
  useStdin: true,
338
338
  };
@@ -87,7 +87,7 @@ describe("runCodex()", () => {
87
87
  expect(result).toBe("codex result");
88
88
  expect(mockSpawn).toHaveBeenCalledWith(
89
89
  "codex",
90
- ["--model", "gpt-5.3-codex", "--quiet", "--full-auto"],
90
+ ["exec", "--model", "gpt-5.3-codex", "--full-auto"],
91
91
  expect.any(Object)
92
92
  );
93
93
  });