bridgerapi 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +88 -7
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -59,6 +59,7 @@ function messagesToPrompt(messages) {
59
59
  var import_child_process = require("child_process");
60
60
  var import_fs = require("fs");
61
61
  var import_os = require("os");
62
+ var import_https = require("https");
62
63
  var HOME = (0, import_os.homedir)();
63
64
  function which(cmd2) {
64
65
  try {
@@ -67,6 +68,27 @@ function which(cmd2) {
67
68
  return "";
68
69
  }
69
70
  }
71
+ function httpsGetJson(url, headers) {
72
+ return new Promise((resolve, reject) => {
73
+ const req = (0, import_https.request)(url, { headers }, (res) => {
74
+ const chunks = [];
75
+ res.on("data", (c) => chunks.push(c));
76
+ res.on("end", () => {
77
+ try {
78
+ resolve(JSON.parse(Buffer.concat(chunks).toString()));
79
+ } catch (e) {
80
+ reject(e);
81
+ }
82
+ });
83
+ });
84
+ req.on("error", reject);
85
+ req.setTimeout(6e3, () => {
86
+ req.destroy();
87
+ reject(new Error("timeout"));
88
+ });
89
+ req.end();
90
+ });
91
+ }
70
92
  async function* spawnStream(cmd2, args, stdin, env) {
71
93
  const proc = (0, import_child_process.spawn)(cmd2, args, {
72
94
  env: env ?? process.env,
@@ -79,11 +101,12 @@ async function* spawnStream(cmd2, args, stdin, env) {
79
101
  yield chunk2;
80
102
  }
81
103
  }
104
+ var CLAUDE_FALLBACK = ["claude-opus-4-6", "claude-sonnet-4-6", "claude-haiku-4-5"];
82
105
  var ClaudeBackend = class {
83
106
  constructor() {
84
107
  this.name = "claude";
85
- this.models = ["claude-opus-4-6", "claude-sonnet-4-6", "claude-haiku-4-5"];
86
108
  this.prefixes = ["claude"];
109
+ this.models = [...CLAUDE_FALLBACK];
87
110
  }
88
111
  get bin() {
89
112
  return process.env.CLAUDE_BIN ?? `${HOME}/.local/bin/claude`;
@@ -91,6 +114,20 @@ var ClaudeBackend = class {
91
114
  available() {
92
115
  return (0, import_fs.existsSync)(this.bin) || Boolean(which("claude"));
93
116
  }
117
+ async fetchLiveModels() {
118
+ const key = process.env.ANTHROPIC_API_KEY;
119
+ if (!key) return [...CLAUDE_FALLBACK];
120
+ try {
121
+ const data = await httpsGetJson("https://api.anthropic.com/v1/models", {
122
+ "x-api-key": key,
123
+ "anthropic-version": "2023-06-01"
124
+ });
125
+ const ids = (data.data ?? []).map((m) => String(m.id)).filter((id) => id.startsWith("claude-"));
126
+ return ids.length ? ids : [...CLAUDE_FALLBACK];
127
+ } catch {
128
+ return [...CLAUDE_FALLBACK];
129
+ }
130
+ }
94
131
  async runBlocking(prompt, model2) {
95
132
  const bin = which("claude") || this.bin;
96
133
  let out;
@@ -111,11 +148,12 @@ var ClaudeBackend = class {
111
148
  yield* spawnStream(bin, ["-p", "--output-format", "text", "--model", model2], prompt);
112
149
  }
113
150
  };
151
+ var GEMINI_FALLBACK = ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.0-flash", "gemini-1.5-pro"];
114
152
  var GeminiBackend = class {
115
153
  constructor() {
116
154
  this.name = "gemini";
117
- this.models = ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.0-flash", "gemini-1.5-pro"];
118
155
  this.prefixes = ["gemini"];
156
+ this.models = [...GEMINI_FALLBACK];
119
157
  }
120
158
  get bin() {
121
159
  return process.env.GEMINI_BIN ?? which("gemini") ?? "/opt/homebrew/bin/gemini";
@@ -123,6 +161,20 @@ var GeminiBackend = class {
123
161
  available() {
124
162
  return Boolean(which("gemini")) || (0, import_fs.existsSync)(this.bin);
125
163
  }
164
+ async fetchLiveModels() {
165
+ const key = process.env.GEMINI_API_KEY;
166
+ if (!key) return [...GEMINI_FALLBACK];
167
+ try {
168
+ const data = await httpsGetJson(
169
+ `https://generativelanguage.googleapis.com/v1beta/models?key=${key}&pageSize=50`,
170
+ {}
171
+ );
172
+ const ids = (data.models ?? []).map((m) => String(m.name).replace("models/", "")).filter((id) => /^gemini-/.test(id) && !id.includes("embedding") && !id.includes("aqa"));
173
+ return ids.length ? ids : [...GEMINI_FALLBACK];
174
+ } catch {
175
+ return [...GEMINI_FALLBACK];
176
+ }
177
+ }
126
178
  async runBlocking(prompt, model2) {
127
179
  const bin = which("gemini") || this.bin;
128
180
  let out;
@@ -158,11 +210,12 @@ var GeminiBackend = class {
158
210
  );
159
211
  }
160
212
  };
213
+ var CODEX_FALLBACK = ["o3", "o4-mini", "gpt-4.1", "gpt-4o", "gpt-4o-mini"];
161
214
  var CodexBackend = class {
162
215
  constructor() {
163
216
  this.name = "codex";
164
- this.models = ["o3", "o4-mini", "gpt-4.1", "gpt-4o"];
165
217
  this.prefixes = ["gpt", "o3", "o4", "o1"];
218
+ this.models = [...CODEX_FALLBACK];
166
219
  }
167
220
  get bin() {
168
221
  return process.env.CODEX_BIN ?? which("codex") ?? "codex";
@@ -170,6 +223,20 @@ var CodexBackend = class {
170
223
  available() {
171
224
  return Boolean(which("codex"));
172
225
  }
226
+ async fetchLiveModels() {
227
+ const key = process.env.OPENAI_API_KEY;
228
+ if (!key) return [...CODEX_FALLBACK];
229
+ try {
230
+ const data = await httpsGetJson("https://api.openai.com/v1/models", {
231
+ Authorization: `Bearer ${key}`
232
+ });
233
+ const EXCLUDE = /instruct|audio|realtime|transcribe|tts|image|search|embed|diariz|whisper|babbage|davinci|curie|ada/i;
234
+ const ids = (data.data ?? []).map((m) => String(m.id)).filter((id) => /^(gpt-[^i]|o[0-9])/.test(id) && !EXCLUDE.test(id)).sort();
235
+ return ids.length ? ids : [...CODEX_FALLBACK];
236
+ } catch {
237
+ return [...CODEX_FALLBACK];
238
+ }
239
+ }
173
240
  async runBlocking(prompt, model2) {
174
241
  let out;
175
242
  try {
@@ -189,8 +256,8 @@ var CodexBackend = class {
189
256
  var CopilotBackend = class {
190
257
  constructor() {
191
258
  this.name = "copilot";
192
- this.models = ["copilot", "github-copilot"];
193
259
  this.prefixes = ["copilot", "github-copilot"];
260
+ this.models = ["copilot", "github-copilot"];
194
261
  }
195
262
  get bin() {
196
263
  return process.env.GH_BIN ?? which("gh") ?? "gh";
@@ -204,6 +271,9 @@ var CopilotBackend = class {
204
271
  return false;
205
272
  }
206
273
  }
274
+ async fetchLiveModels() {
275
+ return this.models;
276
+ }
207
277
  async runBlocking(prompt, model2) {
208
278
  let out;
209
279
  try {
@@ -238,6 +308,14 @@ function pickBackend(model2) {
238
308
  function allModels() {
239
309
  return BACKENDS.filter((b) => b.available()).flatMap((b) => [...b.models]);
240
310
  }
311
+ async function refreshModels() {
312
+ const available = BACKENDS.filter((b) => b.available());
313
+ await Promise.all(
314
+ available.map(async (b) => {
315
+ b.models = await b.fetchLiveModels();
316
+ })
317
+ );
318
+ }
241
319
 
242
320
  // src/server.ts
243
321
  function sse(data) {
@@ -556,6 +634,7 @@ async function cmdSetup() {
556
634
  console.log(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
557
635
  console.log();
558
636
  console.log(" Checking installed backends\u2026");
637
+ await refreshModels();
559
638
  console.log();
560
639
  const available = BACKENDS.filter((b) => b.available());
561
640
  const missing = BACKENDS.filter((b) => !b.available());
@@ -705,8 +784,10 @@ function cmdStatus(port2) {
705
784
  console.log(" Run: bridgerapi install \u2192 install background service");
706
785
  }
707
786
  }
708
- function cmdBackends() {
709
- console.log("\n CLI backends:\n");
787
+ async function cmdBackends() {
788
+ process.stdout.write("\n Fetching live model lists\u2026");
789
+ await refreshModels();
790
+ process.stdout.write(" done.\n\n CLI backends:\n\n");
710
791
  for (const b of BACKENDS) {
711
792
  const ok = b.available();
712
793
  const icon = ok ? "\u2713" : "\u2717";
@@ -811,7 +892,7 @@ switch (cmd) {
811
892
  cmdStatus(port);
812
893
  break;
813
894
  case "backends":
814
- cmdBackends();
895
+ cmdBackends().catch(console.error);
815
896
  break;
816
897
  case "help":
817
898
  case "--help":
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bridgerapi",
3
- "version": "1.2.0",
3
+ "version": "1.3.0",
4
4
  "description": "Turn any AI CLI (Claude Code, Gemini, Codex, GitHub Copilot) into an OpenAI-compatible API — no API keys needed",
5
5
  "keywords": [
6
6
  "claude",