bridgerapi 1.3.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +153 -0
  2. package/dist/cli.js +271 -241
  3. package/package.json +1 -1
package/README.md ADDED
@@ -0,0 +1,153 @@
1
+ # bridgerapi
2
+
3
+ Turn any AI CLI into an OpenAI-compatible API — no API keys, no billing.
4
+
5
+ ```
6
+ npm install -g bridgerapi
7
+ bridgerapi
8
+ ```
9
+
10
+ ---
11
+
12
+ ## What it does
13
+
14
+ bridgerapi runs a local HTTP server that speaks the OpenAI API format (`/v1/chat/completions`, `/v1/models`, `/health`). Any app that supports OpenAI-compatible endpoints — Goose, Cursor, Continue, Open WebUI, and others — can point at it and use whichever AI CLI you have authenticated on your machine.
15
+
16
+ You keep using the free tier or the subscription you already pay for. No Anthropic API key, no OpenAI API key, no extra cost.
17
+
18
+ ---
19
+
20
+ ## Supported backends
21
+
22
+ | CLI | Install | Auth |
23
+ |-----|---------|------|
24
+ | **Claude Code** | [claude.ai/download](https://claude.ai/download) | `claude login` |
25
+ | **Gemini CLI** | `npm i -g @google/gemini-cli` | `gemini auth` |
26
+ | **Codex CLI** | `npm i -g @openai/codex` | `codex auth` |
27
+ | **GitHub Copilot** | `gh extension install github/gh-copilot` | `gh auth login` |
28
+ | **Droid (Factory.ai)** | [factory.ai](https://factory.ai) | `export FACTORY_API_KEY=fk-...` |
29
+
30
+ Install any one of these and bridgerapi will pick it up automatically.
31
+
32
+ ---
33
+
34
+ ## Usage
35
+
36
+ ### Interactive setup (recommended)
37
+
38
+ ```
39
+ bridgerapi
40
+ ```
41
+
42
+ Detects installed backends, asks for a port, and gives you the choice to run in the foreground or install as a background service that auto-starts on login.
43
+
44
+ ### Manual commands
45
+
46
+ ```
47
+ bridgerapi start # run in foreground (default port 8082)
48
+ bridgerapi start --port 9000 # custom port
49
+ bridgerapi install # install as background service
50
+ bridgerapi uninstall # remove background service
51
+ bridgerapi status # check if running
52
+ bridgerapi chat # interactive chat session in terminal
53
+ ```
54
+
55
+ ### Chat mode
56
+
57
+ ```
58
+ bridgerapi chat
59
+ ```
60
+
61
+ Opens an interactive REPL. Type messages, get streamed responses. Keeps conversation history across turns.
62
+
63
+ ---
64
+
65
+ ## Connecting an app
66
+
67
+ Once running, point any OpenAI-compatible app at:
68
+
69
+ | Setting | Value |
70
+ |---------|-------|
71
+ | **Base URL** | `http://127.0.0.1:8082/v1` |
72
+ | **API Key** | `local` (any non-empty string) |
73
+ | **Model** | any model your CLI supports |
74
+
75
+ ### Goose
76
+
77
+ ```yaml
78
+ # ~/.config/goose/config.yaml
79
+ GOOSE_PROVIDER: openai
80
+ GOOSE_MODEL: claude-sonnet-4-6
81
+ OPENAI_HOST: http://127.0.0.1:8082
82
+ ```
83
+
84
+ ### Continue (VS Code / JetBrains)
85
+
86
+ ```json
87
+ {
88
+ "models": [{
89
+ "title": "bridgerapi",
90
+ "provider": "openai",
91
+ "model": "claude-sonnet-4-6",
92
+ "apiBase": "http://127.0.0.1:8082/v1",
93
+ "apiKey": "local"
94
+ }]
95
+ }
96
+ ```
97
+
98
+ ### Open WebUI
99
+
100
+ Set **OpenAI API Base URL** to `http://127.0.0.1:8082/v1` and **API Key** to `local`.
101
+
102
+ ---
103
+
104
+ ## How it works
105
+
106
+ ```
107
+ Your app → POST /v1/chat/completions
108
+
109
+ bridgerapi (local HTTP server)
110
+
111
+ claude / gemini / codex / gh copilot / droid (subprocess)
112
+
113
+ streamed response back to your app
114
+ ```
115
+
116
+ bridgerapi converts OpenAI message format to a plain prompt, spawns the appropriate CLI as a subprocess using your existing auth, and streams the output back as SSE — exactly what the OpenAI streaming format expects.
117
+
118
+ Model routing is automatic by prefix:
119
+
120
+ - `claude-*` → Claude Code CLI
121
+ - `gemini-*` → Gemini CLI
122
+ - `gpt-*`, `o1`, `o3`, `o4` → Codex CLI
123
+ - `copilot` → GitHub Copilot CLI
124
+ - `glm-*`, `kimi-*`, `minimax-*`, `droid` → Droid CLI (Factory.ai)
125
+
126
+ If the requested backend isn't available, it falls back to the first one that is.
127
+
128
+ ---
129
+
130
+ ## Background service
131
+
132
+ On macOS, `bridgerapi install` creates a LaunchAgent that starts automatically on login and restarts if it crashes. Logs go to `~/.bridgerapi/server.log`.
133
+
134
+ On Linux, it creates a systemd user service (`systemctl --user`).
135
+
136
+ ```
137
+ bridgerapi install # installs and starts
138
+ bridgerapi status # check pid and port
139
+ bridgerapi uninstall # stops and removes
140
+ ```
141
+
142
+ ---
143
+
144
+ ## Requirements
145
+
146
+ - Node.js 18+
147
+ - At least one AI CLI installed and authenticated
148
+
149
+ ---
150
+
151
+ ## License
152
+
153
+ MIT — [teodorwaltervido](https://github.com/teodorwaltervido)
package/dist/cli.js CHANGED
@@ -59,7 +59,6 @@ function messagesToPrompt(messages) {
59
59
  var import_child_process = require("child_process");
60
60
  var import_fs = require("fs");
61
61
  var import_os = require("os");
62
- var import_https = require("https");
63
62
  var HOME = (0, import_os.homedir)();
64
63
  function which(cmd2) {
65
64
  try {
@@ -68,45 +67,18 @@ function which(cmd2) {
68
67
  return "";
69
68
  }
70
69
  }
71
- function httpsGetJson(url, headers) {
72
- return new Promise((resolve, reject) => {
73
- const req = (0, import_https.request)(url, { headers }, (res) => {
74
- const chunks = [];
75
- res.on("data", (c) => chunks.push(c));
76
- res.on("end", () => {
77
- try {
78
- resolve(JSON.parse(Buffer.concat(chunks).toString()));
79
- } catch (e) {
80
- reject(e);
81
- }
82
- });
83
- });
84
- req.on("error", reject);
85
- req.setTimeout(6e3, () => {
86
- req.destroy();
87
- reject(new Error("timeout"));
88
- });
89
- req.end();
90
- });
91
- }
92
- async function* spawnStream(cmd2, args, stdin, env) {
70
+ async function* spawnStream(cmd2, args, stdin) {
93
71
  const proc = (0, import_child_process.spawn)(cmd2, args, {
94
- env: env ?? process.env,
72
+ env: process.env,
95
73
  stdio: ["pipe", "pipe", "pipe"]
96
74
  });
97
- if (stdin) {
98
- proc.stdin.end(stdin);
99
- }
100
- for await (const chunk2 of proc.stdout) {
101
- yield chunk2;
102
- }
75
+ if (stdin) proc.stdin.end(stdin);
76
+ for await (const chunk2 of proc.stdout) yield chunk2;
103
77
  }
104
- var CLAUDE_FALLBACK = ["claude-opus-4-6", "claude-sonnet-4-6", "claude-haiku-4-5"];
105
78
  var ClaudeBackend = class {
106
79
  constructor() {
107
80
  this.name = "claude";
108
81
  this.prefixes = ["claude"];
109
- this.models = [...CLAUDE_FALLBACK];
110
82
  }
111
83
  get bin() {
112
84
  return process.env.CLAUDE_BIN ?? `${HOME}/.local/bin/claude`;
@@ -114,20 +86,6 @@ var ClaudeBackend = class {
114
86
  available() {
115
87
  return (0, import_fs.existsSync)(this.bin) || Boolean(which("claude"));
116
88
  }
117
- async fetchLiveModels() {
118
- const key = process.env.ANTHROPIC_API_KEY;
119
- if (!key) return [...CLAUDE_FALLBACK];
120
- try {
121
- const data = await httpsGetJson("https://api.anthropic.com/v1/models", {
122
- "x-api-key": key,
123
- "anthropic-version": "2023-06-01"
124
- });
125
- const ids = (data.data ?? []).map((m) => String(m.id)).filter((id) => id.startsWith("claude-"));
126
- return ids.length ? ids : [...CLAUDE_FALLBACK];
127
- } catch {
128
- return [...CLAUDE_FALLBACK];
129
- }
130
- }
131
89
  async runBlocking(prompt, model2) {
132
90
  const bin = which("claude") || this.bin;
133
91
  let out;
@@ -148,12 +106,10 @@ var ClaudeBackend = class {
148
106
  yield* spawnStream(bin, ["-p", "--output-format", "text", "--model", model2], prompt);
149
107
  }
150
108
  };
151
- var GEMINI_FALLBACK = ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.0-flash", "gemini-1.5-pro"];
152
109
  var GeminiBackend = class {
153
110
  constructor() {
154
111
  this.name = "gemini";
155
112
  this.prefixes = ["gemini"];
156
- this.models = [...GEMINI_FALLBACK];
157
113
  }
158
114
  get bin() {
159
115
  return process.env.GEMINI_BIN ?? which("gemini") ?? "/opt/homebrew/bin/gemini";
@@ -161,61 +117,39 @@ var GeminiBackend = class {
161
117
  available() {
162
118
  return Boolean(which("gemini")) || (0, import_fs.existsSync)(this.bin);
163
119
  }
164
- async fetchLiveModels() {
165
- const key = process.env.GEMINI_API_KEY;
166
- if (!key) return [...GEMINI_FALLBACK];
167
- try {
168
- const data = await httpsGetJson(
169
- `https://generativelanguage.googleapis.com/v1beta/models?key=${key}&pageSize=50`,
170
- {}
171
- );
172
- const ids = (data.models ?? []).map((m) => String(m.name).replace("models/", "")).filter((id) => /^gemini-/.test(id) && !id.includes("embedding") && !id.includes("aqa"));
173
- return ids.length ? ids : [...GEMINI_FALLBACK];
174
- } catch {
175
- return [...GEMINI_FALLBACK];
176
- }
177
- }
178
120
  async runBlocking(prompt, model2) {
179
121
  const bin = which("gemini") || this.bin;
180
122
  let out;
181
123
  try {
182
- out = (0, import_child_process.execFileSync)(
183
- bin,
184
- ["--output-format", "json", "--model", model2, "--approval-mode", "yolo"],
185
- { input: prompt, encoding: "utf8", timeout: 3e5, env: process.env }
186
- );
124
+ out = (0, import_child_process.execFileSync)(bin, ["--output-format", "json", "--model", model2, "--approval-mode", "yolo"], {
125
+ input: prompt,
126
+ encoding: "utf8",
127
+ timeout: 3e5,
128
+ env: process.env
129
+ });
187
130
  } catch (e) {
188
131
  const err = e.stderr?.trim() ?? "";
189
- if (/auth|login|sign.?in/i.test(err)) {
132
+ if (/auth|login|sign.?in/i.test(err))
190
133
  throw new Error(`Gemini not authenticated. Run: gemini auth OR export GEMINI_API_KEY=<key>`);
191
- }
192
134
  throw new Error(err || `gemini exited non-zero`);
193
135
  }
194
136
  const raw = out.trim();
195
137
  try {
196
138
  const data = JSON.parse(raw);
197
- const text = String(data.response ?? data.result ?? data.text ?? raw);
198
- const usage = data.tokenCount ?? data.usage ?? null;
199
- return [text, usage];
139
+ return [String(data.response ?? data.result ?? data.text ?? raw), data.tokenCount ?? data.usage ?? null];
200
140
  } catch {
201
141
  return [raw, null];
202
142
  }
203
143
  }
204
144
  async *stream(prompt, model2) {
205
145
  const bin = which("gemini") || this.bin;
206
- yield* spawnStream(
207
- bin,
208
- ["--output-format", "text", "--model", model2, "--approval-mode", "yolo"],
209
- prompt
210
- );
146
+ yield* spawnStream(bin, ["--output-format", "text", "--model", model2, "--approval-mode", "yolo"], prompt);
211
147
  }
212
148
  };
213
- var CODEX_FALLBACK = ["o3", "o4-mini", "gpt-4.1", "gpt-4o", "gpt-4o-mini"];
214
149
  var CodexBackend = class {
215
150
  constructor() {
216
151
  this.name = "codex";
217
152
  this.prefixes = ["gpt", "o3", "o4", "o1"];
218
- this.models = [...CODEX_FALLBACK];
219
153
  }
220
154
  get bin() {
221
155
  return process.env.CODEX_BIN ?? which("codex") ?? "codex";
@@ -223,20 +157,6 @@ var CodexBackend = class {
223
157
  available() {
224
158
  return Boolean(which("codex"));
225
159
  }
226
- async fetchLiveModels() {
227
- const key = process.env.OPENAI_API_KEY;
228
- if (!key) return [...CODEX_FALLBACK];
229
- try {
230
- const data = await httpsGetJson("https://api.openai.com/v1/models", {
231
- Authorization: `Bearer ${key}`
232
- });
233
- const EXCLUDE = /instruct|audio|realtime|transcribe|tts|image|search|embed|diariz|whisper|babbage|davinci|curie|ada/i;
234
- const ids = (data.data ?? []).map((m) => String(m.id)).filter((id) => /^(gpt-[^i]|o[0-9])/.test(id) && !EXCLUDE.test(id)).sort();
235
- return ids.length ? ids : [...CODEX_FALLBACK];
236
- } catch {
237
- return [...CODEX_FALLBACK];
238
- }
239
- }
240
160
  async runBlocking(prompt, model2) {
241
161
  let out;
242
162
  try {
@@ -257,7 +177,6 @@ var CopilotBackend = class {
257
177
  constructor() {
258
178
  this.name = "copilot";
259
179
  this.prefixes = ["copilot", "github-copilot"];
260
- this.models = ["copilot", "github-copilot"];
261
180
  }
262
181
  get bin() {
263
182
  return process.env.GH_BIN ?? which("gh") ?? "gh";
@@ -271,9 +190,6 @@ var CopilotBackend = class {
271
190
  return false;
272
191
  }
273
192
  }
274
- async fetchLiveModels() {
275
- return this.models;
276
- }
277
193
  async runBlocking(prompt, model2) {
278
194
  let out;
279
195
  try {
@@ -290,32 +206,56 @@ var CopilotBackend = class {
290
206
  yield* spawnStream(this.bin, ["copilot", "suggest", "-t", "general", prompt]);
291
207
  }
292
208
  };
209
+ var DroidBackend = class {
210
+ constructor() {
211
+ this.name = "droid";
212
+ // Route Droid-exclusive model families + explicit "droid" prefix
213
+ this.prefixes = ["droid", "glm", "kimi", "minimax"];
214
+ }
215
+ get bin() {
216
+ return process.env.DROID_BIN ?? which("droid") ?? `${HOME}/.local/bin/droid`;
217
+ }
218
+ available() {
219
+ return (0, import_fs.existsSync)(this.bin) || Boolean(which("droid"));
220
+ }
221
+ async runBlocking(prompt, model2) {
222
+ const bin = which("droid") || this.bin;
223
+ let out;
224
+ try {
225
+ out = (0, import_child_process.execFileSync)(bin, ["exec", "--output-format", "text", "--model", model2, "-"], {
226
+ input: prompt,
227
+ encoding: "utf8",
228
+ timeout: 3e5
229
+ });
230
+ } catch (e) {
231
+ throw new Error(e.stderr?.trim() || `droid exited non-zero`);
232
+ }
233
+ return [out.trim(), null];
234
+ }
235
+ async *stream(prompt, model2) {
236
+ const bin = which("droid") || this.bin;
237
+ yield* spawnStream(bin, ["exec", "--output-format", "text", "--model", model2, "-"], prompt);
238
+ }
239
+ };
293
240
  var BACKENDS = [
294
241
  new ClaudeBackend(),
295
242
  new GeminiBackend(),
296
243
  new CodexBackend(),
297
- new CopilotBackend()
244
+ new CopilotBackend(),
245
+ new DroidBackend()
298
246
  ];
299
247
  function pickBackend(model2) {
248
+ const override = process.env.BRIDGERAPI_BACKEND?.toLowerCase();
249
+ if (override) {
250
+ const forced = BACKENDS.find((b) => b.name === override && b.available());
251
+ if (forced) return forced;
252
+ }
300
253
  const m = model2.toLowerCase();
301
254
  for (const b of BACKENDS) {
302
- if (b.prefixes.some((p) => m.startsWith(p))) {
303
- if (b.available()) return b;
304
- }
255
+ if (b.prefixes.some((p) => m.startsWith(p)) && b.available()) return b;
305
256
  }
306
257
  return BACKENDS.find((b) => b.available()) ?? BACKENDS[0];
307
258
  }
308
- function allModels() {
309
- return BACKENDS.filter((b) => b.available()).flatMap((b) => [...b.models]);
310
- }
311
- async function refreshModels() {
312
- const available = BACKENDS.filter((b) => b.available());
313
- await Promise.all(
314
- available.map(async (b) => {
315
- b.models = await b.fetchLiveModels();
316
- })
317
- );
318
- }
319
259
 
320
260
  // src/server.ts
321
261
  function sse(data) {
@@ -368,9 +308,15 @@ async function readBody(req) {
368
308
  }
369
309
  function handleModels(res) {
370
310
  const ts = Math.floor(Date.now() / 1e3);
311
+ const available = BACKENDS.filter((b) => b.available());
371
312
  sendJson(res, 200, {
372
313
  object: "list",
373
- data: allModels().map((id) => ({ id, object: "model", created: ts, owned_by: "bridge" }))
314
+ data: available.map((b) => ({
315
+ id: b.name,
316
+ object: "model",
317
+ created: ts,
318
+ owned_by: "bridgerapi"
319
+ }))
374
320
  });
375
321
  }
376
322
  function handleHealth(res, port2) {
@@ -394,10 +340,10 @@ async function handleChat(req, res) {
394
340
  const model2 = body.model ?? "claude-sonnet-4-6";
395
341
  const streaming = Boolean(body.stream);
396
342
  const prompt = messagesToPrompt(messages);
397
- const backend = pickBackend(model2);
343
+ const backend2 = pickBackend(model2);
398
344
  const id = `chatcmpl-${(0, import_crypto.randomUUID)().replace(/-/g, "").slice(0, 20)}`;
399
345
  const ts = Math.floor(Date.now() / 1e3);
400
- console.log(` ${backend.name} model=${model2} stream=${streaming} turns=${messages.length}`);
346
+ console.log(` ${backend2.name} model=${model2} stream=${streaming} turns=${messages.length}`);
401
347
  if (streaming) {
402
348
  cors(res, 200);
403
349
  res.setHeader("Content-Type", "text/event-stream");
@@ -406,7 +352,7 @@ async function handleChat(req, res) {
406
352
  res.flushHeaders();
407
353
  res.write(chunk(id, ts, model2, { role: "assistant" }));
408
354
  try {
409
- for await (const raw of backend.stream(prompt, model2)) {
355
+ for await (const raw of backend2.stream(prompt, model2)) {
410
356
  res.write(chunk(id, ts, model2, { content: raw.toString("utf8") }));
411
357
  }
412
358
  } catch (err) {
@@ -417,7 +363,7 @@ async function handleChat(req, res) {
417
363
  res.end();
418
364
  } else {
419
365
  try {
420
- const [text, usage] = await backend.runBlocking(prompt, model2);
366
+ const [text, usage] = await backend2.runBlocking(prompt, model2);
421
367
  sendJson(res, 200, completion(id, ts, model2, text, usage));
422
368
  } catch (err) {
423
369
  console.error(` error: ${err.message}`);
@@ -461,10 +407,13 @@ var LABEL = "com.bridgerapi.server";
461
407
  function plistPath() {
462
408
  return (0, import_path.join)(HOME2, "Library/LaunchAgents", `${LABEL}.plist`);
463
409
  }
464
- function writePlist(port2, scriptPath, nodePath) {
410
+ function writePlist(port2, scriptPath, nodePath, backend2) {
465
411
  const logDir = (0, import_path.join)(HOME2, ".bridgerapi");
466
412
  (0, import_fs2.mkdirSync)(logDir, { recursive: true });
467
413
  (0, import_fs2.mkdirSync)((0, import_path.join)(HOME2, "Library/LaunchAgents"), { recursive: true });
414
+ const backendEntry = backend2 ? `
415
+ <key>BRIDGERAPI_BACKEND</key>
416
+ <string>${backend2}</string>` : "";
468
417
  const plist = `<?xml version="1.0" encoding="UTF-8"?>
469
418
  <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
470
419
  "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
@@ -489,7 +438,7 @@ function writePlist(port2, scriptPath, nodePath) {
489
438
  <key>HOME</key>
490
439
  <string>${HOME2}</string>
491
440
  <key>CUSTOM_OKBRIDGER_API_KEY</key>
492
- <string>local</string>
441
+ <string>local</string>${backendEntry}
493
442
  </dict>
494
443
 
495
444
  <key>StandardOutPath</key>
@@ -510,10 +459,12 @@ function unitPath() {
510
459
  const configHome = process.env.XDG_CONFIG_HOME ?? (0, import_path.join)(HOME2, ".config");
511
460
  return (0, import_path.join)(configHome, "systemd/user/bridgerapi.service");
512
461
  }
513
- function writeUnit(port2, scriptPath, nodePath) {
462
+ function writeUnit(port2, scriptPath, nodePath, backend2) {
514
463
  const logDir = (0, import_path.join)(HOME2, ".bridgerapi");
515
464
  (0, import_fs2.mkdirSync)(logDir, { recursive: true });
516
465
  (0, import_fs2.mkdirSync)((0, import_path.join)(HOME2, ".config/systemd/user"), { recursive: true });
466
+ const backendLine = backend2 ? `
467
+ Environment=BRIDGERAPI_BACKEND=${backend2}` : "";
517
468
  const unit = `[Unit]
518
469
  Description=bridgerapi \u2014 OpenAI-compatible bridge for AI CLIs
519
470
  After=network.target
@@ -524,7 +475,7 @@ ExecStart=${nodePath} ${scriptPath} start
524
475
  Environment=BRIDGERAPI_PORT=${port2}
525
476
  Environment=HOME=${HOME2}
526
477
  Environment=CUSTOM_OKBRIDGER_API_KEY=local
527
- Environment=PATH=${HOME2}/.local/bin:/usr/local/bin:/usr/bin:/bin
478
+ Environment=PATH=${HOME2}/.local/bin:/usr/local/bin:/usr/bin:/bin${backendLine}
528
479
  Restart=always
529
480
  StandardOutput=append:${logDir}/server.log
530
481
  StandardError=append:${logDir}/server.log
@@ -534,7 +485,7 @@ WantedBy=default.target
534
485
  `;
535
486
  (0, import_fs2.writeFileSync)(unitPath(), unit);
536
487
  }
537
- function installService(port2) {
488
+ function installService(port2, backend2) {
538
489
  const scriptPath = process.argv[1];
539
490
  const nodePath = process.execPath;
540
491
  const os = (0, import_os2.platform)();
@@ -543,11 +494,11 @@ function installService(port2) {
543
494
  (0, import_child_process2.execSync)(`launchctl unload "${plistPath()}" 2>/dev/null`, { stdio: "ignore" });
544
495
  } catch {
545
496
  }
546
- writePlist(port2, scriptPath, nodePath);
497
+ writePlist(port2, scriptPath, nodePath, backend2);
547
498
  (0, import_child_process2.execSync)(`launchctl load -w "${plistPath()}"`);
548
499
  console.log(`\u2713 LaunchAgent installed \u2192 ${plistPath()}`);
549
500
  } else if (os === "linux") {
550
- writeUnit(port2, scriptPath, nodePath);
501
+ writeUnit(port2, scriptPath, nodePath, backend2);
551
502
  try {
552
503
  (0, import_child_process2.execSync)("systemctl --user daemon-reload");
553
504
  } catch {
@@ -606,13 +557,40 @@ function serviceStatus() {
606
557
  return { running: false };
607
558
  }
608
559
 
609
- // src/cli.ts
560
+ // src/config.ts
610
561
  var import_fs3 = require("fs");
611
562
  var import_os3 = require("os");
612
563
  var import_path2 = require("path");
564
+ var CONFIG_DIR = (0, import_path2.join)((0, import_os3.homedir)(), ".bridgerapi");
565
+ var CONFIG_FILE = (0, import_path2.join)(CONFIG_DIR, "config.json");
566
+ function loadConfig() {
567
+ try {
568
+ if ((0, import_fs3.existsSync)(CONFIG_FILE)) {
569
+ return JSON.parse((0, import_fs3.readFileSync)(CONFIG_FILE, "utf8"));
570
+ }
571
+ } catch {
572
+ }
573
+ return {};
574
+ }
575
+ function saveConfig(cfg) {
576
+ (0, import_fs3.mkdirSync)(CONFIG_DIR, { recursive: true });
577
+ (0, import_fs3.writeFileSync)(CONFIG_FILE, JSON.stringify(cfg, null, 2) + "\n");
578
+ }
579
+
580
+ // src/cli.ts
581
+ var import_fs4 = require("fs");
582
+ var import_os4 = require("os");
583
+ var import_path3 = require("path");
613
584
  var import_readline = require("readline");
614
- var PORT = parseInt(process.env.BRIDGERAPI_PORT ?? "8082");
615
- var LOG_DIR = (0, import_path2.join)((0, import_os3.homedir)(), ".bridgerapi");
585
+ var DEFAULT_PORT = parseInt(process.env.BRIDGERAPI_PORT ?? "8082");
586
+ var LOG_DIR = (0, import_path3.join)((0, import_os4.homedir)(), ".bridgerapi");
587
+ var INSTALL_HINTS = {
588
+ claude: "claude login (Claude Code \u2014 claude.ai/download)",
589
+ gemini: "gemini auth (Gemini CLI \u2014 npm i -g @google/gemini-cli)",
590
+ codex: "codex auth (Codex CLI \u2014 npm i -g @openai/codex)",
591
+ copilot: "gh auth login (GitHub Copilot \u2014 gh extension install github/gh-copilot)",
592
+ droid: "export FACTORY_API_KEY=fk-... (Droid \u2014 factory.ai)"
593
+ };
616
594
  function ask(question) {
617
595
  const rl = (0, import_readline.createInterface)({ input: process.stdin, output: process.stdout });
618
596
  return new Promise((resolve) => {
@@ -622,96 +600,80 @@ function ask(question) {
622
600
  });
623
601
  });
624
602
  }
625
- var INSTALL_HINTS = {
626
- claude: "Install Claude Code: https://claude.ai/download \u2192 then sign in with: claude login",
627
- gemini: "Install Gemini CLI: npm install -g @google/gemini-cli \u2192 then: gemini auth",
628
- codex: "Install Codex CLI: npm install -g @openai/codex \u2192 then: codex auth",
629
- copilot: "Install Copilot: gh extension install github/gh-copilot \u2192 then: gh auth login"
630
- };
631
603
  async function cmdSetup() {
632
604
  console.log();
633
605
  console.log(" bridgerapi \u2014 OpenAI-compatible API bridge for AI CLI tools");
634
606
  console.log(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
635
607
  console.log();
636
- console.log(" Checking installed backends\u2026");
637
- await refreshModels();
638
- console.log();
639
608
  const available = BACKENDS.filter((b) => b.available());
640
- const missing = BACKENDS.filter((b) => !b.available());
609
+ console.log(" Backends detected:\n");
641
610
  for (const b of BACKENDS) {
642
611
  const ok = b.available();
643
- const icon = ok ? "\u2713" : "\u2717";
644
- const note = ok ? b.models.join(", ") : "not found";
645
- console.log(` ${icon} ${b.name.padEnd(10)} ${note}`);
612
+ console.log(` ${ok ? "\u2713" : "\u2717"} ${b.name}`);
613
+ if (!ok) console.log(` \u2192 ${INSTALL_HINTS[b.name]}`);
646
614
  }
647
615
  console.log();
648
616
  if (available.length === 0) {
649
- console.log(" No CLI backends found. Install at least one:\n");
650
- for (const b of missing) console.log(` ${INSTALL_HINTS[b.name]}`);
651
- console.log();
652
- console.log(" Then re-run: bridgerapi");
617
+ console.log(" No backends found. Install at least one CLI above, then re-run: bridgerapi");
653
618
  process.exit(1);
654
619
  }
655
- if (missing.length) {
656
- console.log(" Optional \u2014 to enable missing backends:");
657
- for (const b of missing) console.log(` ${INSTALL_HINTS[b.name]}`);
620
+ const cfg = loadConfig();
621
+ let chosenBackend;
622
+ if (available.length === 1) {
623
+ chosenBackend = available[0].name;
624
+ } else {
625
+ const names = available.map((b) => b.name);
626
+ const currentDefault = cfg.backend && names.includes(cfg.backend) ? cfg.backend : names[0];
627
+ const defaultIdx = names.indexOf(currentDefault);
628
+ console.log(" Which backend do you want to use as default?\n");
629
+ names.forEach((name, i) => {
630
+ const marker = i === defaultIdx ? " \u2190 default" : "";
631
+ console.log(` ${i + 1} ${name}${marker}`);
632
+ });
633
+ console.log();
634
+ const backendAnswer = await ask(` Choose [${defaultIdx + 1}]: `);
635
+ const parsed = parseInt(backendAnswer);
636
+ const backendIdx = backendAnswer && !isNaN(parsed) ? parsed - 1 : defaultIdx;
637
+ chosenBackend = names[Math.max(0, Math.min(backendIdx, names.length - 1))];
658
638
  console.log();
659
639
  }
660
- const portAnswer = await ask(` Port [${PORT}]: `);
661
- const port2 = portAnswer ? parseInt(portAnswer) || PORT : PORT;
640
+ const defaultPort = cfg.port ?? DEFAULT_PORT;
641
+ const portAnswer = await ask(` Port [${defaultPort}]: `);
642
+ const port2 = portAnswer ? parseInt(portAnswer) || defaultPort : defaultPort;
643
+ saveConfig({ backend: chosenBackend, port: port2 });
662
644
  console.log();
663
645
  console.log(" How do you want to run bridgerapi?");
664
- console.log(" 1 Start now in the foreground (stops when you close terminal)");
665
- console.log(" 2 Install as a background service (auto-starts on login)");
646
+ console.log(" 1 Foreground (stops when terminal closes)");
647
+ console.log(" 2 Background service (auto-starts on login)");
666
648
  console.log();
667
- const modeAnswer = await ask(" Choose [1/2]: ");
668
- const mode = modeAnswer === "2" ? "install" : "start";
649
+ const choice = await ask(" Choose [1/2]: ");
669
650
  console.log();
670
- if (mode === "install") {
671
- cmdInstall(port2);
651
+ if (choice === "2") {
652
+ cmdInstall(port2, chosenBackend);
672
653
  } else {
673
- cmdStart(port2);
654
+ cmdStart(port2, chosenBackend);
674
655
  }
675
656
  }
676
- function parseArgs() {
677
- const args = process.argv.slice(2);
678
- const cmd2 = args[0] ?? "";
679
- let port2 = PORT;
680
- let model2;
681
- for (let i = 1; i < args.length; i++) {
682
- if ((args[i] === "--port" || args[i] === "-p") && args[i + 1]) {
683
- port2 = parseInt(args[++i]);
684
- } else if ((args[i] === "--model" || args[i] === "-m") && args[i + 1]) {
685
- model2 = args[++i];
686
- }
687
- }
688
- return { cmd: cmd2, port: port2, model: model2 };
689
- }
690
- function cmdStart(port2) {
691
- (0, import_fs3.mkdirSync)(LOG_DIR, { recursive: true });
657
+ function cmdStart(port2, backend2) {
658
+ (0, import_fs4.mkdirSync)(LOG_DIR, { recursive: true });
659
+ const cfg = loadConfig();
660
+ const activeBackend = backend2 ?? cfg.backend;
661
+ if (activeBackend) process.env.BRIDGERAPI_BACKEND = activeBackend;
692
662
  const available = BACKENDS.filter((b) => b.available());
693
- const unavailable = BACKENDS.filter((b) => !b.available());
694
- console.log(` bridgerapi \u2192 http://127.0.0.1:${port2}`);
695
- console.log(` backends : ${available.map((b) => b.name).join(", ") || "none!"}`);
696
- if (unavailable.length) {
697
- console.log(` missing : ${unavailable.map((b) => b.name).join(", ")}`);
698
- }
699
- console.log(` logs : ${LOG_DIR}/server.log`);
700
- console.log();
701
663
  if (available.length === 0) {
702
- console.error(" Error: no CLI backends found. Run: bridgerapi to see setup instructions.");
664
+ console.error(" No CLI backends found. Run: bridgerapi to see setup instructions.");
703
665
  process.exit(1);
704
666
  }
705
667
  const server = createBridgeServer(port2);
706
668
  server.listen(port2, "127.0.0.1", () => {
707
- console.log(` GET /v1/models`);
708
- console.log(` POST /v1/chat/completions (streaming + blocking)`);
709
- console.log(` GET /health`);
669
+ console.log(` bridgerapi is running`);
710
670
  console.log();
711
- console.log(" OpenAI-compatible config:");
712
- console.log(` Base URL : http://127.0.0.1:${port2}/v1`);
713
- console.log(` API Key : local`);
714
- console.log(` Model : ${available[0].models[0]}`);
671
+ console.log(` Base URL : http://127.0.0.1:${port2}/v1`);
672
+ console.log(` API Key : local`);
673
+ console.log();
674
+ const backendLabel = activeBackend ? `${activeBackend} (all requests routed here)` : available.map((b) => b.name).join(", ") + " (auto-routed by model prefix)";
675
+ console.log(` Backend : ${backendLabel}`);
676
+ console.log(` Logs : ${LOG_DIR}/server.log`);
715
677
  console.log();
716
678
  console.log(" Ctrl+C to stop.");
717
679
  });
@@ -724,9 +686,11 @@ function cmdStart(port2) {
724
686
  process.exit(1);
725
687
  });
726
688
  }
727
- function cmdInstall(port2) {
689
+ function cmdInstall(port2, backend2) {
690
+ const cfg = loadConfig();
691
+ const activeBackend = backend2 ?? cfg.backend;
728
692
  try {
729
- installService(port2);
693
+ installService(port2, activeBackend);
730
694
  console.log();
731
695
  console.log(" Waiting for server to start\u2026");
732
696
  let attempts = 0;
@@ -738,12 +702,11 @@ function cmdInstall(port2) {
738
702
  if (res.statusCode === 200) {
739
703
  clearInterval(poll);
740
704
  console.log();
741
- console.log(` \u2713 bridgerapi is running on http://127.0.0.1:${port2}`);
705
+ console.log(` bridgerapi is running`);
742
706
  console.log();
743
- console.log(" OpenAI-compatible config:");
744
- console.log(` Base URL : http://127.0.0.1:${port2}/v1`);
745
- console.log(` API Key : local`);
746
- console.log(` Model : ${allModels()[0] ?? "claude-sonnet-4-6"}`);
707
+ console.log(` Base URL : http://127.0.0.1:${port2}/v1`);
708
+ console.log(` API Key : local`);
709
+ if (activeBackend) console.log(` Backend : ${activeBackend}`);
747
710
  console.log();
748
711
  console.log(` Logs : tail -f ${LOG_DIR}/server.log`);
749
712
  console.log(` Stop : bridgerapi uninstall`);
@@ -774,50 +737,99 @@ function cmdUninstall() {
774
737
  }
775
738
  }
776
739
  function cmdStatus(port2) {
740
+ const cfg = loadConfig();
777
741
  const { running, pid } = serviceStatus();
778
742
  if (running) {
779
- console.log(` \u2713 bridgerapi is running${pid ? ` (pid ${pid})` : ""} on port ${port2}`);
743
+ console.log(` bridgerapi is running${pid ? ` (pid ${pid})` : ""}`);
744
+ console.log(` Base URL : http://127.0.0.1:${port2}/v1`);
745
+ console.log(` API Key : local`);
746
+ if (cfg.backend) console.log(` Backend : ${cfg.backend}`);
780
747
  } else {
781
748
  console.log(" bridgerapi is not running.");
782
- console.log(" Run: bridgerapi \u2192 interactive setup");
783
- console.log(" Run: bridgerapi start \u2192 start in foreground");
784
- console.log(" Run: bridgerapi install \u2192 install background service");
749
+ console.log(" Run: bridgerapi \u2192 setup wizard");
750
+ console.log(" Run: bridgerapi start \u2192 start in foreground");
751
+ console.log(" Run: bridgerapi install \u2192 install background service");
752
+ }
753
+ }
754
+ function cmdConfig(args) {
755
+ const cfg = loadConfig();
756
+ if (args[0] === "set") {
757
+ for (const pair of args.slice(1)) {
758
+ const eqIdx = pair.indexOf("=");
759
+ if (eqIdx === -1) {
760
+ console.error(` Invalid format: ${pair} (use key=value)`);
761
+ process.exit(1);
762
+ }
763
+ const key = pair.slice(0, eqIdx);
764
+ const val = pair.slice(eqIdx + 1);
765
+ if (key === "backend") {
766
+ const known = BACKENDS.find((b) => b.name === val);
767
+ if (!known) {
768
+ console.error(` Unknown backend: ${val}`);
769
+ console.error(` Valid options: ${BACKENDS.map((b) => b.name).join(", ")}`);
770
+ process.exit(1);
771
+ }
772
+ cfg.backend = val;
773
+ console.log(` backend \u2192 ${val}`);
774
+ } else if (key === "port") {
775
+ const p = parseInt(val);
776
+ if (isNaN(p) || p < 1 || p > 65535) {
777
+ console.error(" Invalid port number");
778
+ process.exit(1);
779
+ }
780
+ cfg.port = p;
781
+ console.log(` port \u2192 ${p}`);
782
+ } else {
783
+ console.error(` Unknown key: ${key}`);
784
+ console.error(` Valid keys: backend, port`);
785
+ process.exit(1);
786
+ }
787
+ }
788
+ saveConfig(cfg);
789
+ console.log(" Saved.");
790
+ return;
785
791
  }
786
- }
787
- async function cmdBackends() {
788
- process.stdout.write("\n Fetching live model lists\u2026");
789
- await refreshModels();
790
- process.stdout.write(" done.\n\n CLI backends:\n\n");
791
- for (const b of BACKENDS) {
792
- const ok = b.available();
793
- const icon = ok ? "\u2713" : "\u2717";
794
- const models = ok ? b.models.join(", ") : "(not installed)";
795
- console.log(` ${icon} ${b.name.padEnd(10)} ${models}`);
796
- if (!ok) console.log(` ${INSTALL_HINTS[b.name]}`);
792
+ if (args[0] === "reset") {
793
+ saveConfig({});
794
+ console.log(" Config reset to defaults.");
795
+ return;
797
796
  }
798
797
  console.log();
799
- const available = BACKENDS.filter((b) => b.available());
800
- if (available.length) {
801
- console.log(` All available models:
802
- ${allModels().join(", ")}
803
- `);
804
- }
798
+ console.log(" bridgerapi config");
799
+ console.log(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
800
+ console.log(` backend : ${cfg.backend ?? "(auto \u2014 routed by model prefix)"}`);
801
+ console.log(` port : ${cfg.port ?? `${DEFAULT_PORT} (default)`}`);
802
+ console.log(` file : ${(0, import_path3.join)((0, import_os4.homedir)(), ".bridgerapi/config.json")}`);
803
+ console.log();
804
+ console.log(" To change:");
805
+ console.log(` bridgerapi config set backend=claude`);
806
+ console.log(` bridgerapi config set port=9000`);
807
+ console.log(` bridgerapi config reset`);
808
+ console.log();
805
809
  }
806
- async function cmdChat(model2) {
810
+ async function cmdChat(model2, backendFlag) {
811
+ const cfg = loadConfig();
812
+ const activeBackend = backendFlag ?? (model2 && BACKENDS.find((b) => b.name === model2?.toLowerCase())?.name) ?? cfg.backend;
813
+ if (activeBackend) process.env.BRIDGERAPI_BACKEND = activeBackend;
814
+ const resolvedModel = model2 && BACKENDS.find((b) => b.name === model2.toLowerCase()) ? void 0 : model2;
807
815
  const available = BACKENDS.filter((b) => b.available());
808
816
  if (available.length === 0) {
809
817
  console.error(" No backends found. Run: bridgerapi to see setup instructions.");
810
818
  process.exit(1);
811
819
  }
812
- const resolvedModel = model2 ?? available[0].models[0];
813
- const backend = pickBackend(resolvedModel);
820
+ const fallbackModel = `${activeBackend ?? available[0].name}-default`;
821
+ const backend2 = pickBackend(resolvedModel ?? fallbackModel);
814
822
  console.log();
815
- console.log(` bridgerapi chat \u2014 ${backend.name} \u2014 ${resolvedModel}`);
816
- console.log(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
817
- console.log(" Type your message and press Enter. Ctrl+C to exit.");
823
+ console.log(` bridgerapi chat \u2014 ${backend2.name}`);
824
+ console.log(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
825
+ console.log(" Type a message and press Enter. Ctrl+C to exit.");
818
826
  console.log();
819
827
  const history = [];
820
828
  const rl = (0, import_readline.createInterface)({ input: process.stdin, output: process.stdout });
829
+ rl.on("close", () => {
830
+ console.log("\n Goodbye.");
831
+ process.exit(0);
832
+ });
821
833
  const prompt = () => {
822
834
  rl.question("You: ", async (input) => {
823
835
  const text = input.trim();
@@ -829,8 +841,8 @@ async function cmdChat(model2) {
829
841
  process.stdout.write("\n");
830
842
  let reply = "";
831
843
  try {
832
- process.stdout.write(`${backend.name}: `);
833
- for await (const chunk2 of backend.stream(messagesToPrompt(history), resolvedModel)) {
844
+ process.stdout.write(`${backend2.name}: `);
845
+ for await (const chunk2 of backend2.stream(messagesToPrompt(history), resolvedModel ?? fallbackModel)) {
834
846
  const piece = chunk2.toString("utf8");
835
847
  process.stdout.write(piece);
836
848
  reply += piece;
@@ -844,10 +856,6 @@ async function cmdChat(model2) {
844
856
  prompt();
845
857
  });
846
858
  };
847
- rl.on("close", () => {
848
- console.log("\n Goodbye.");
849
- process.exit(0);
850
- });
851
859
  prompt();
852
860
  }
853
861
  function showHelp() {
@@ -856,34 +864,56 @@ function showHelp() {
856
864
 
857
865
  Usage:
858
866
  bridgerapi Interactive setup wizard
859
- bridgerapi chat [--model <name>] Interactive chat session in terminal
867
+ bridgerapi chat [--model m] Chat in the terminal (routes by model prefix)
868
+ bridgerapi chat --backend <name> Chat using a specific backend
860
869
  bridgerapi start [--port n] Start API server in the foreground
870
+ bridgerapi start --backend <name> Start forcing a specific backend for all requests
861
871
  bridgerapi install [--port n] Install as a background service
862
872
  bridgerapi uninstall Remove background service
863
873
  bridgerapi status Show service status
864
- bridgerapi backends List detected backends
874
+ bridgerapi config Show saved configuration
875
+ bridgerapi config set backend=<b> Set default backend (claude|gemini|codex|copilot|droid)
876
+ bridgerapi config set port=<n> Set default port
877
+ bridgerapi config reset Clear saved configuration
865
878
 
866
- Supported backends (auto-detected):
867
- claude-* \u2192 Claude Code CLI (claude login)
868
- gemini-* \u2192 Gemini CLI (gemini auth)
869
- gpt-*, o3 \u2192 Codex CLI (codex auth)
870
- copilot \u2192 GitHub Copilot (gh auth login)
879
+ Backends: claude, gemini, codex, copilot, droid
871
880
  `.trim());
872
881
  }
873
- var { cmd, port, model } = parseArgs();
882
+ function parseArgs() {
883
+ const cfg = loadConfig();
884
+ const args = process.argv.slice(2);
885
+ const cmd2 = args[0] ?? "";
886
+ let port2 = cfg.port ?? DEFAULT_PORT;
887
+ let model2;
888
+ let backend2;
889
+ const rest2 = [];
890
+ for (let i = 1; i < args.length; i++) {
891
+ if ((args[i] === "--port" || args[i] === "-p") && args[i + 1]) {
892
+ port2 = parseInt(args[++i]);
893
+ } else if ((args[i] === "--model" || args[i] === "-m") && args[i + 1]) {
894
+ model2 = args[++i];
895
+ } else if ((args[i] === "--backend" || args[i] === "-b") && args[i + 1]) {
896
+ backend2 = args[++i];
897
+ } else {
898
+ rest2.push(args[i]);
899
+ }
900
+ }
901
+ return { cmd: cmd2, port: port2, model: model2, backend: backend2, rest: rest2 };
902
+ }
903
+ var { cmd, port, model, backend, rest } = parseArgs();
874
904
  switch (cmd) {
875
905
  case "":
876
906
  case "setup":
877
907
  cmdSetup();
878
908
  break;
879
909
  case "chat":
880
- cmdChat(model);
910
+ cmdChat(model, backend);
881
911
  break;
882
912
  case "start":
883
- cmdStart(port);
913
+ cmdStart(port, backend);
884
914
  break;
885
915
  case "install":
886
- cmdInstall(port);
916
+ cmdInstall(port, backend);
887
917
  break;
888
918
  case "uninstall":
889
919
  cmdUninstall();
@@ -891,8 +921,8 @@ switch (cmd) {
891
921
  case "status":
892
922
  cmdStatus(port);
893
923
  break;
894
- case "backends":
895
- cmdBackends().catch(console.error);
924
+ case "config":
925
+ cmdConfig(rest);
896
926
  break;
897
927
  case "help":
898
928
  case "--help":
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bridgerapi",
3
- "version": "1.3.0",
3
+ "version": "1.6.0",
4
4
  "description": "Turn any AI CLI (Claude Code, Gemini, Codex, GitHub Copilot) into an OpenAI-compatible API — no API keys needed",
5
5
  "keywords": [
6
6
  "claude",