@elvatis_com/openclaw-cli-bridge-elvatis 1.8.0 → 1.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -1
- package/SKILL.md +1 -1
- package/index.ts +2 -2
- package/openclaw.plugin.json +1 -1
- package/package.json +1 -1
- package/src/proxy-server.ts +1 -1
- package/test/bitnet-proxy.test.ts +6 -5
package/README.md
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
> OpenClaw plugin that bridges locally installed AI CLIs (Codex, Gemini, Claude Code) as model providers — with slash commands for instant model switching, restore, health testing, and model listing.
|
|
4
4
|
|
|
5
|
-
**Current version:** `1.8.
|
|
5
|
+
**Current version:** `1.8.2`
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
@@ -368,6 +368,12 @@ npm test # vitest run (83 tests)
|
|
|
368
368
|
|
|
369
369
|
## Changelog
|
|
370
370
|
|
|
371
|
+
### v1.8.2
|
|
372
|
+
- **fix:** `local-bitnet/*` exempt from tool-call rejection — llama-server ignores tool schemas silently. OpenClaw always sends tools with every request, so this was blocking all BitNet usage.
|
|
373
|
+
|
|
374
|
+
### v1.8.1
|
|
375
|
+
- **fix:** `--now` flag now works when followed by additional text (e.g. `/cli-bitnet --now hello`) — was using `===` instead of `startsWith`.
|
|
376
|
+
|
|
371
377
|
### v1.8.0
|
|
372
378
|
- **feat:** BitNet local inference — `local-bitnet/bitnet-2b` routes to llama-server on 127.0.0.1:8082. No API key, no internet, pure CPU inference (2.87 tok/s on i7-6700K). Use `/cli-bitnet` to switch.
|
|
373
379
|
- **feat:** `/bridge-status` shows BitNet server health as 5th provider.
|
package/SKILL.md
CHANGED
package/index.ts
CHANGED
|
@@ -939,7 +939,7 @@ function proxyTestRequest(
|
|
|
939
939
|
const plugin = {
|
|
940
940
|
id: "openclaw-cli-bridge-elvatis",
|
|
941
941
|
name: "OpenClaw CLI Bridge",
|
|
942
|
-
version: "1.8.
|
|
942
|
+
version: "1.8.2",
|
|
943
943
|
description:
|
|
944
944
|
"Phase 1: openai-codex auth bridge. " +
|
|
945
945
|
"Phase 2: HTTP proxy for gemini/claude CLIs. " +
|
|
@@ -1408,7 +1408,7 @@ const plugin = {
|
|
|
1408
1408
|
acceptsArgs: true,
|
|
1409
1409
|
requireAuth: false,
|
|
1410
1410
|
handler: async (ctx: PluginCommandContext): Promise<PluginCommandResult> => {
|
|
1411
|
-
const forceNow = (ctx.args ?? "").trim().toLowerCase()
|
|
1411
|
+
const forceNow = (ctx.args ?? "").trim().toLowerCase().startsWith("--now");
|
|
1412
1412
|
api.logger.info(`[cli-bridge] /${name} by ${ctx.senderId ?? "?"} forceNow=${forceNow}`);
|
|
1413
1413
|
return switchModel(api, model, label, forceNow);
|
|
1414
1414
|
},
|
package/openclaw.plugin.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"id": "openclaw-cli-bridge-elvatis",
|
|
3
3
|
"name": "OpenClaw CLI Bridge",
|
|
4
|
-
"version": "1.8.
|
|
4
|
+
"version": "1.8.2",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"description": "Phase 1: openai-codex auth bridge. Phase 2: local HTTP proxy routing model calls through gemini/claude CLIs (vllm provider).",
|
|
7
7
|
"providers": [
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@elvatis_com/openclaw-cli-bridge-elvatis",
|
|
3
|
-
"version": "1.8.
|
|
3
|
+
"version": "1.8.2",
|
|
4
4
|
"description": "Bridges gemini, claude, and codex CLI tools as OpenClaw model providers. Reads existing CLI auth without re-login.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"openclaw": {
|
package/src/proxy-server.ts
CHANGED
|
@@ -341,7 +341,7 @@ async function handleRequest(
|
|
|
341
341
|
// CLI-proxy models (cli-gemini/*, cli-claude/*) are plain text completions —
|
|
342
342
|
// they cannot process tool/function call schemas. Return a clear 400 so
|
|
343
343
|
// OpenClaw can surface a meaningful error instead of getting a garbled response.
|
|
344
|
-
const isCliModel = model.startsWith("cli-gemini/") || model.startsWith("cli-claude/")
|
|
344
|
+
const isCliModel = model.startsWith("cli-gemini/") || model.startsWith("cli-claude/"); // local-bitnet/* exempt: llama-server silently ignores tools
|
|
345
345
|
if (hasTools && isCliModel) {
|
|
346
346
|
res.writeHead(400, { "Content-Type": "application/json" });
|
|
347
347
|
res.end(JSON.stringify({
|
|
@@ -178,8 +178,10 @@ describe("POST /v1/chat/completions — BitNet routing", () => {
|
|
|
178
178
|
expect(b.choices[0].finish_reason).toBe("stop");
|
|
179
179
|
});
|
|
180
180
|
|
|
181
|
-
it("
|
|
182
|
-
|
|
181
|
+
it("accepts tool calls (llama-server ignores tools silently)", async () => {
|
|
182
|
+
// local-bitnet/* is exempt from tool rejection — llama-server ignores tool schemas
|
|
183
|
+
// and responds normally. OpenClaw always sends tools with every request.
|
|
184
|
+
const { status } = await httpPost(
|
|
183
185
|
`${urlWith}/v1/chat/completions`,
|
|
184
186
|
{
|
|
185
187
|
model: "local-bitnet/bitnet-2b",
|
|
@@ -188,8 +190,7 @@ describe("POST /v1/chat/completions — BitNet routing", () => {
|
|
|
188
190
|
},
|
|
189
191
|
auth
|
|
190
192
|
);
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
expect(b.error.code).toBe("tools_not_supported");
|
|
193
|
+
// Should NOT return 400 tools_not_supported — reaches BitNet routing (503 = server not running in test)
|
|
194
|
+
expect(status).not.toBe(400);
|
|
194
195
|
});
|
|
195
196
|
});
|