jbai-cli 1.6.0 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,857 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * jbai-proxy - Local reverse proxy for JetBrains AI Platform (Grazie)
5
+ *
6
+ * Runs a local HTTP server that transparently proxies OpenAI / Anthropic / Google
7
+ * API calls to Grazie, injecting the JWT authentication header automatically.
8
+ *
9
+ * This allows ANY tool that supports custom base URLs (Codex Desktop, Cursor,
10
+ * Continue, etc.) to work through JetBrains AI Platform without per-tool wrappers.
11
+ *
12
+ * Routes:
13
+ * /openai/v1/* → Grazie OpenAI endpoint (explicit)
14
+ * /anthropic/v1/* → Grazie Anthropic endpoint (explicit)
15
+ * /google/v1/* → Grazie Google endpoint (explicit)
16
+ *
17
+ * /v1/chat/completions → OpenAI (auto-detect)
18
+ * /v1/completions → OpenAI (auto-detect)
19
+ * /v1/responses → OpenAI (auto-detect)
20
+ * /v1/embeddings → OpenAI (auto-detect)
21
+ * /v1/models → synthetic model list
22
+ * /v1/messages → Anthropic (auto-detect)
23
+ *
24
+ * Usage:
25
+ * jbai proxy # start on default port 18080
26
+ * jbai proxy --port 9090 # custom port
27
+ * jbai proxy --daemon # run in background
28
+ * jbai proxy stop # stop background daemon
29
+ * jbai proxy status # check if running
30
+ */
31
+
32
// Node stdlib modules plus the shared jbai config module (token storage,
// Grazie endpoints, model catalogs).
const http = require('http');
const https = require('https');
const fs = require('fs');
const path = require('path');
const os = require('os');
const config = require('../lib/config');

// Default local listen port for the proxy.
const DEFAULT_PORT = 18080;
// Daemon bookkeeping files live alongside the rest of the jbai config.
const PID_FILE = path.join(config.CONFIG_DIR, 'proxy.pid');
const LOG_FILE = path.join(config.CONFIG_DIR, 'proxy.log');
42
+
43
+ // ---------------------------------------------------------------------------
44
+ // Token management - re-reads from disk on each request for hot-refresh
45
+ // ---------------------------------------------------------------------------
46
+ let cachedToken = null;
47
+ let tokenMtime = 0;
48
+
49
+ function getToken() {
50
+ try {
51
+ const stat = fs.statSync(config.TOKEN_FILE);
52
+ if (stat.mtimeMs !== tokenMtime) {
53
+ cachedToken = fs.readFileSync(config.TOKEN_FILE, 'utf-8').trim();
54
+ tokenMtime = stat.mtimeMs;
55
+ }
56
+ } catch {
57
+ cachedToken = null;
58
+ }
59
+ return cachedToken;
60
+ }
61
+
62
+ // ---------------------------------------------------------------------------
63
+ // Route resolution
64
+ // ---------------------------------------------------------------------------
65
+
66
+ function resolveRoute(method, urlPath) {
67
+ const endpoints = config.getEndpoints();
68
+
69
+ // Explicit provider prefix routes
70
+ if (urlPath.startsWith('/openai/')) {
71
+ const rest = urlPath.slice('/openai'.length); // keeps /v1/...
72
+ return { target: endpoints.openai.replace(/\/v1$/, '') + rest, provider: 'openai' };
73
+ }
74
+ if (urlPath.startsWith('/anthropic/')) {
75
+ const rest = urlPath.slice('/anthropic'.length);
76
+ return { target: endpoints.anthropic.replace(/\/v1$/, '') + rest, provider: 'anthropic' };
77
+ }
78
+ if (urlPath.startsWith('/google/')) {
79
+ const rest = urlPath.slice('/google'.length);
80
+ return { target: endpoints.google + rest, provider: 'google' };
81
+ }
82
+
83
+ // Auto-detect routes based on standard SDK paths
84
+ // Anthropic SDK always calls /v1/messages
85
+ if (urlPath.startsWith('/v1/messages')) {
86
+ const rest = urlPath.slice('/v1'.length);
87
+ return { target: endpoints.anthropic + rest, provider: 'anthropic' };
88
+ }
89
+
90
+ // Everything else under /v1/* goes to OpenAI
91
+ if (urlPath.startsWith('/v1/')) {
92
+ // Special: /v1/models returns synthetic list
93
+ if (urlPath === '/v1/models') {
94
+ return { target: null, provider: 'models' };
95
+ }
96
+ const rest = urlPath.slice('/v1'.length);
97
+ return { target: endpoints.openai + rest, provider: 'openai' };
98
+ }
99
+
100
+ return null;
101
+ }
102
+
103
+ // ---------------------------------------------------------------------------
104
+ // Synthetic /v1/models response
105
+ // ---------------------------------------------------------------------------
106
+
107
+ function buildModelsResponse() {
108
+ const models = [];
109
+ const now = Math.floor(Date.now() / 1000);
110
+ const seen = new Set();
111
+
112
+ for (const m of config.MODELS.openai.available) {
113
+ models.push({ id: m, object: 'model', created: now, owned_by: 'openai' });
114
+ seen.add(m);
115
+ }
116
+ // Include codex-only models (responses API only) that aren't already in the openai list
117
+ for (const m of config.MODELS.codex.available) {
118
+ if (!seen.has(m)) {
119
+ models.push({ id: m, object: 'model', created: now, owned_by: 'openai' });
120
+ }
121
+ }
122
+ for (const m of config.MODELS.claude.available) {
123
+ models.push({ id: m, object: 'model', created: now, owned_by: 'anthropic' });
124
+ }
125
+ for (const m of config.MODELS.gemini.available) {
126
+ models.push({ id: m, object: 'model', created: now, owned_by: 'google' });
127
+ }
128
+
129
+ return { object: 'list', data: models };
130
+ }
131
+
132
/**
 * Codex CLI model picker response (matches the format served by
 * chatgpt.com/backend-api/codex/models): one entry per configured codex
 * model, first entry marked as the default.
 *
 * @returns {{models: object[]}}
 */
function buildCodexModelsResponse() {
  // Human-readable blurbs; unknown slugs fall back to the slug itself.
  const descriptions = {
    'gpt-5.3-codex-api-preview': 'Latest GPT-5.3 Codex model. Designed for long-running, detailed software engineering tasks.',
    'gpt-5.2-codex': 'Latest frontier agentic coding model.',
    'gpt-5.2-pro-2025-12-11': 'GPT-5.2 Pro for deep reasoning and complex tasks.',
    'gpt-5.2-2025-12-11': 'Latest frontier model with improvements across knowledge, reasoning and coding.',
    'gpt-5.2': 'Latest frontier model (alias).',
    'gpt-5.1-codex-max': 'Codex-optimized flagship for deep and fast reasoning.',
    'gpt-5.1-codex': 'Codex-optimized for software engineering tasks.',
    'gpt-5.1-codex-mini': 'Optimized for codex. Cheaper, faster, but less capable.',
    'gpt-5.1-2025-11-13': 'GPT-5.1 general-purpose model.',
    'gpt-5-codex': 'GPT-5 Codex for agentic coding.',
    'gpt-5-2025-08-07': 'GPT-5 general-purpose model.',
    'o4-mini-2025-04-16': 'O4-mini reasoning model.',
    'o3-2025-04-16': 'O3 reasoning model.',
  };

  const models = [];
  config.MODELS.codex.available.forEach((slug, index) => {
    models.push({
      slug,
      name: slug,
      description: descriptions[slug] || slug,
      default_active: index === 0,
    });
  });

  return { models };
}
159
+
160
+ // ---------------------------------------------------------------------------
161
+ // Proxy handler
162
+ // ---------------------------------------------------------------------------
163
+
164
+ function proxy(req, res) {
165
+ const startTime = Date.now();
166
+
167
+ // CORS preflight
168
+ if (req.method === 'OPTIONS') {
169
+ res.writeHead(204, {
170
+ 'Access-Control-Allow-Origin': '*',
171
+ 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
172
+ 'Access-Control-Allow-Headers': '*',
173
+ });
174
+ res.end();
175
+ return;
176
+ }
177
+
178
+ // Parse URL (strip query string for routing, preserve for forwarding)
179
+ const [urlPath, query] = (req.url || '/').split('?');
180
+ const route = resolveRoute(req.method, urlPath);
181
+
182
+ if (!route) {
183
+ // Codex CLI model picker endpoint (overrides chatgpt.com/backend-api/codex/models)
184
+ if (urlPath === '/backend-api/codex/models') {
185
+ res.writeHead(200, {
186
+ 'Content-Type': 'application/json',
187
+ 'Access-Control-Allow-Origin': '*',
188
+ });
189
+ res.end(JSON.stringify(buildCodexModelsResponse()));
190
+ log(`[codex] GET /backend-api/codex/models → 200 (${Date.now() - startTime}ms)`);
191
+ return;
192
+ }
193
+
194
+ // Health / info endpoint
195
+ if (urlPath === '/' || urlPath === '/health') {
196
+ const token = getToken();
197
+ const info = {
198
+ service: 'jbai-proxy',
199
+ status: 'ok',
200
+ environment: config.getEnvironment(),
201
+ tokenPresent: !!token,
202
+ tokenExpired: token ? config.isTokenExpired(token) : null,
203
+ routes: {
204
+ openai: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/openai/v1 OR /v1/chat/completions',
205
+ anthropic: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/anthropic/v1 OR /v1/messages',
206
+ google: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/google/v1',
207
+ }
208
+ };
209
+ res.writeHead(200, { 'Content-Type': 'application/json' });
210
+ res.end(JSON.stringify(info, null, 2));
211
+ return;
212
+ }
213
+
214
+ res.writeHead(404, { 'Content-Type': 'application/json' });
215
+ res.end(JSON.stringify({ error: { message: `Unknown route: ${urlPath}`, type: 'invalid_request_error' } }));
216
+ return;
217
+ }
218
+
219
+ // Synthetic models endpoint
220
+ if (route.provider === 'models') {
221
+ res.writeHead(200, {
222
+ 'Content-Type': 'application/json',
223
+ 'Access-Control-Allow-Origin': '*',
224
+ });
225
+ res.end(JSON.stringify(buildModelsResponse()));
226
+ log(`[models] GET /v1/models → 200 (${Date.now() - startTime}ms)`);
227
+ return;
228
+ }
229
+
230
+ // Get token
231
+ const token = getToken();
232
+ if (!token) {
233
+ res.writeHead(401, { 'Content-Type': 'application/json' });
234
+ res.end(JSON.stringify({ error: { message: 'No Grazie token found. Run: jbai token set', type: 'authentication_error' } }));
235
+ return;
236
+ }
237
+
238
+ if (config.isTokenExpired(token)) {
239
+ res.writeHead(401, { 'Content-Type': 'application/json' });
240
+ res.end(JSON.stringify({ error: { message: 'Grazie token expired. Run: jbai token set', type: 'authentication_error' } }));
241
+ return;
242
+ }
243
+
244
+ // Read request body
245
+ const chunks = [];
246
+ req.on('data', (chunk) => chunks.push(chunk));
247
+ req.on('end', () => {
248
+ const body = Buffer.concat(chunks);
249
+ const targetUrl = new URL(route.target + (query ? '?' + query : ''));
250
+
251
+ // Build forwarded headers - pass through everything except host/authorization
252
+ const fwdHeaders = {};
253
+ for (const [key, value] of Object.entries(req.headers)) {
254
+ const lower = key.toLowerCase();
255
+ // Skip hop-by-hop and host headers
256
+ if (['host', 'connection', 'keep-alive', 'transfer-encoding', 'te', 'trailer', 'upgrade'].includes(lower)) continue;
257
+ // Skip authorization (we inject our own)
258
+ if (lower === 'authorization') continue;
259
+ fwdHeaders[key] = value;
260
+ }
261
+
262
+ // Inject Grazie auth
263
+ fwdHeaders['Grazie-Authenticate-JWT'] = token;
264
+
265
+ // Ensure content-length is correct for the body we have
266
+ if (body.length > 0) {
267
+ fwdHeaders['content-length'] = body.length;
268
+ }
269
+
270
+ const proxyReq = https.request({
271
+ hostname: targetUrl.hostname,
272
+ port: 443,
273
+ path: targetUrl.pathname + targetUrl.search,
274
+ method: req.method,
275
+ headers: fwdHeaders,
276
+ }, (proxyRes) => {
277
+ // Forward status and headers
278
+ const resHeaders = { ...proxyRes.headers, 'Access-Control-Allow-Origin': '*' };
279
+ res.writeHead(proxyRes.statusCode, resHeaders);
280
+
281
+ // Stream response (supports SSE streaming)
282
+ proxyRes.pipe(res);
283
+
284
+ proxyRes.on('end', () => {
285
+ const elapsed = Date.now() - startTime;
286
+ log(`[${route.provider}] ${req.method} ${urlPath} → ${proxyRes.statusCode} (${elapsed}ms)`);
287
+ });
288
+ });
289
+
290
+ proxyReq.on('error', (err) => {
291
+ log(`[${route.provider}] ${req.method} ${urlPath} → ERROR: ${err.message}`);
292
+ if (!res.headersSent) {
293
+ res.writeHead(502, { 'Content-Type': 'application/json' });
294
+ res.end(JSON.stringify({ error: { message: `Proxy error: ${err.message}`, type: 'proxy_error' } }));
295
+ }
296
+ });
297
+
298
+ if (body.length > 0) {
299
+ proxyReq.write(body);
300
+ }
301
+ proxyReq.end();
302
+ });
303
+ }
304
+
305
+ // ---------------------------------------------------------------------------
306
+ // Logging
307
+ // ---------------------------------------------------------------------------
308
+
309
+ let logToFile = false;
310
+
311
+ function log(msg) {
312
+ const line = `${new Date().toISOString()} ${msg}`;
313
+ if (logToFile) {
314
+ fs.appendFileSync(LOG_FILE, line + '\n');
315
+ } else {
316
+ console.log(line);
317
+ }
318
+ }
319
+
320
+ // ---------------------------------------------------------------------------
321
+ // Daemon management
322
+ // ---------------------------------------------------------------------------
323
+
324
+ function writePid(port) {
325
+ config.ensureConfigDir();
326
+ fs.writeFileSync(PID_FILE, JSON.stringify({ pid: process.pid, port }), { mode: 0o600 });
327
+ }
328
+
329
/**
 * Read and parse the daemon pid file.
 *
 * @returns {{pid: number, port: number}|null} parsed contents, or null when
 *   the file is missing or contains invalid JSON.
 */
function readPid() {
  let raw;
  try {
    raw = fs.readFileSync(PID_FILE, 'utf-8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
336
+
337
/** Best-effort removal of the pid file; errors (e.g. already gone) are ignored. */
function removePid() {
  try {
    fs.unlinkSync(PID_FILE);
  } catch {
    // nothing to clean up
  }
}
340
+
341
/**
 * Check whether a process with the given pid exists, using the signal-0
 * probe (no signal is actually delivered).
 *
 * @param {number} pid process id to probe.
 * @returns {boolean} true when the process exists.
 */
function isRunning(pid) {
  let alive = true;
  try {
    process.kill(pid, 0);
  } catch {
    alive = false;
  }
  return alive;
}
349
+
350
/**
 * Stop a background proxy daemon found via the pid file. Cleans up stale
 * pid files when the recorded process is no longer alive.
 */
function stopDaemon() {
  const daemon = readPid();
  if (daemon === null) {
    console.log('No proxy daemon found');
    return;
  }
  if (!isRunning(daemon.pid)) {
    console.log('Proxy daemon not running (stale pid file)');
    removePid();
    return;
  }
  try {
    process.kill(daemon.pid, 'SIGTERM');
    console.log(`Stopped proxy daemon (pid ${daemon.pid})`);
    removePid();
  } catch (err) {
    console.log(`Failed to stop daemon: ${err.message}`);
  }
}
369
+
370
/**
 * Print daemon status and, when running, the base URLs tools should use.
 * Removes the pid file when it points at a dead process.
 */
function showStatus() {
  const daemon = readPid();
  if (daemon === null) {
    console.log('Proxy: not running');
    return;
  }
  if (!isRunning(daemon.pid)) {
    console.log('Proxy: not running (stale pid file)');
    removePid();
    return;
  }
  const { port, pid } = daemon;
  console.log(`Proxy: running on port ${port} (pid ${pid})`);
  console.log(`  OpenAI:    http://localhost:${port}/openai/v1`);
  console.log(`  Anthropic: http://localhost:${port}/anthropic/v1`);
  console.log(`  Auto:      http://localhost:${port}/v1/...`);
  console.log(`  Logs:      ${LOG_FILE}`);
}
387
+
388
/**
 * Spawn a detached copy of this script with --_daemon and report whether it
 * came up, by polling the pid file after a short delay.
 *
 * NOTE(review): the 500ms wait is a heuristic — a slow start can be
 * misreported as a failure even though the daemon comes up moments later.
 *
 * @param {number} port port the daemon should listen on.
 */
function startDaemon(port) {
  const { spawn } = require('child_process');
  // --_daemon is the internal flag telling the child to log to file.
  const child = spawn(process.execPath, [__filename, '--port', String(port), '--_daemon'], {
    detached: true,
    stdio: 'ignore',
    env: { ...process.env }
  });
  child.unref();
  // Give it a moment to start
  setTimeout(() => {
    const info = readPid();
    if (info && isRunning(info.pid)) {
      console.log(`Proxy daemon started on port ${port} (pid ${info.pid})`);
      console.log('');
      printUsage(port);
    } else {
      console.log('Failed to start daemon. Check logs: ' + LOG_FILE);
    }
  }, 500);
}
408
+
409
+ // ---------------------------------------------------------------------------
410
+ // macOS launchd service
411
+ // ---------------------------------------------------------------------------
412
+
413
+ const LAUNCHD_LABEL = 'com.jetbrains.jbai-proxy';
414
+
415
+ function getLaunchdPlistPath() {
416
+ return path.join(os.homedir(), 'Library', 'LaunchAgents', `${LAUNCHD_LABEL}.plist`);
417
+ }
418
+
419
/**
 * Install and load a macOS launchd LaunchAgent that keeps the proxy running
 * and restarts it on login (RunAtLoad + KeepAlive). No-op with a message on
 * non-macOS platforms.
 *
 * NOTE(review): nodePath/proxyPath/home are interpolated into the plist XML
 * without escaping — a path containing &, < or > would produce an invalid
 * plist. TODO confirm this is acceptable for expected install locations.
 *
 * @param {number} port port passed to the proxy via --port.
 */
function installService(port) {
  if (process.platform !== 'darwin') {
    console.log('Auto-start service is only supported on macOS.');
    console.log('On Linux, create a systemd user service manually.');
    return;
  }

  const nodePath = process.execPath;
  const proxyPath = path.resolve(__filename);
  const home = os.homedir();
  const plistPath = getLaunchdPlistPath();

  const plist = `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>Label</key>
  <string>${LAUNCHD_LABEL}</string>
  <key>ProgramArguments</key>
  <array>
    <string>${nodePath}</string>
    <string>${proxyPath}</string>
    <string>--port</string>
    <string>${port}</string>
    <string>--_daemon</string>
  </array>
  <key>RunAtLoad</key>
  <true/>
  <key>KeepAlive</key>
  <true/>
  <key>StandardOutPath</key>
  <string>${home}/.jbai/proxy.log</string>
  <key>StandardErrorPath</key>
  <string>${home}/.jbai/proxy.log</string>
</dict>
</plist>`;

  const launchAgentsDir = path.join(home, 'Library', 'LaunchAgents');
  if (!fs.existsSync(launchAgentsDir)) {
    fs.mkdirSync(launchAgentsDir, { recursive: true });
  }

  fs.writeFileSync(plistPath, plist);
  console.log(`Written: ${plistPath}`);

  // Load the service
  const { execSync } = require('child_process');
  try {
    // Unload first in case it's already loaded
    try { execSync(`launchctl unload "${plistPath}" 2>/dev/null`); } catch {}
    execSync(`launchctl load "${plistPath}"`);
    console.log(`Service loaded and started on port ${port}`);
    console.log('');
    console.log('The proxy will now auto-start on login.');
    console.log(`  Logs:   ${home}/.jbai/proxy.log`);
    console.log(`  Remove: jbai proxy uninstall-service`);
    console.log('');
    printUsage(port);
  } catch (e) {
    // Plist is on disk even if launchctl failed; tell the user how to retry.
    console.log(`Written plist but failed to load: ${e.message}`);
    console.log(`Try: launchctl load "${plistPath}"`);
  }
}
482
+
483
/**
 * Unload and delete the launchd LaunchAgent so the proxy no longer
 * auto-starts. No-op with a message on non-macOS or when nothing is installed.
 */
function uninstallService() {
  if (process.platform !== 'darwin') {
    console.log('Auto-start service is only supported on macOS.');
    return;
  }

  const plistPath = getLaunchdPlistPath();
  if (!fs.existsSync(plistPath)) {
    console.log('No launchd service installed.');
    return;
  }

  const { execSync } = require('child_process');
  // Unload may fail if the job isn't loaded — the plist is removed regardless.
  try {
    execSync(`launchctl unload "${plistPath}"`);
  } catch {
    // already unloaded
  }
  fs.unlinkSync(plistPath);
  console.log('Service uninstalled. Proxy will no longer auto-start.');
}
502
+
503
+ // ---------------------------------------------------------------------------
504
+ // One-liner setup: proxy + codex + shell env
505
+ // ---------------------------------------------------------------------------
506
+
507
+ function setup(port) {
508
+ console.log('Setting up jbai-proxy...\n');
509
+
510
+ // 1. Check token
511
+ const token = config.getToken();
512
+ if (!token) {
513
+ console.log('No token found. Let\'s set one up first.\n');
514
+ console.log(' 1. Go to https://platform.jetbrains.ai/ (or staging: https://platform.stgn.jetbrains.ai/)');
515
+ console.log(' 2. Click Profile → "Copy Developer Token"');
516
+ console.log(' 3. Run: jbai token set\n');
517
+ console.log('Then re-run: jbai proxy setup');
518
+ process.exit(1);
519
+ }
520
+
521
+ if (config.isTokenExpired(token)) {
522
+ console.log('Your token is expired. Run: jbai token set');
523
+ console.log('Then re-run: jbai proxy setup');
524
+ process.exit(1);
525
+ }
526
+
527
+ console.log(`Token: valid`);
528
+ console.log(`Environment: ${config.getEnvironment()}\n`);
529
+
530
+ let steps = 0;
531
+
532
+ // 2. Configure Codex Desktop (config.toml)
533
+ steps += configureCodexDesktop(port);
534
+
535
+ // 3. Add JBAI_PROXY_KEY to shell rc
536
+ steps += configureShellEnv();
537
+
538
+ // 4. Install launchd service (macOS) or start daemon
539
+ steps += configureAutoStart(port);
540
+
541
+ // 5. Verify proxy is running
542
+ console.log('');
543
+ verifyProxy(port);
544
+
545
+ console.log(`\n--- Setup complete (${steps} changes) ---\n`);
546
+ printUsage(port);
547
+
548
+ console.log(`Cursor (manual step):
549
+ Open Cursor → Settings → Models → enable "Override OpenAI Base URL"
550
+ Base URL: http://localhost:${port}/openai/v1
551
+ API Key: placeholder
552
+ `);
553
+ }
554
+
555
/**
 * Ensure ~/.codex/config.toml routes Codex Desktop through the local proxy:
 * adds a [model_providers.jbai-proxy] block if missing and points
 * `model_provider` at it, preserving the rest of the file.
 *
 * Bug fix: the previous version appended the provider block straight to the
 * file, then computed the `model_provider` edits from the content read
 * BEFORE the append and wrote that with writeFileSync — clobbering the
 * freshly appended provider block. All edits are now applied to a single
 * in-memory copy and written to disk exactly once.
 *
 * @param {number} port local proxy port used in the provider base_url.
 * @returns {number} number of changes made (0-2).
 */
function configureCodexDesktop(port) {
  const codexDir = path.join(os.homedir(), '.codex');
  const codexConfig = path.join(codexDir, 'config.toml');
  let changed = 0;

  if (!fs.existsSync(codexDir)) {
    fs.mkdirSync(codexDir, { recursive: true });
  }

  let content = '';
  if (fs.existsSync(codexConfig)) {
    content = fs.readFileSync(codexConfig, 'utf-8');
  }
  const original = content;

  // Check for an existing top-level `model_provider = ...` assignment BEFORE
  // appending the provider block (whose table header would otherwise match a
  // naive substring test for "model_provider").
  const hasProviderSetting = /^model_provider\s*=/m.test(content);
  const usesJbaiProxy = /^model_provider\s*=\s*"jbai-proxy"/m.test(content);

  // Add jbai-proxy provider if missing
  if (!content.includes('[model_providers.jbai-proxy]')) {
    const providerBlock = `
# JetBrains AI via local proxy (for Codex Desktop)
[model_providers.jbai-proxy]
name = "JetBrains AI (Proxy)"
base_url = "http://localhost:${port}/openai/v1"
env_key = "JBAI_PROXY_KEY"
wire_api = "responses"
`;
    content += providerBlock;
    console.log('Codex Desktop: added jbai-proxy provider to ~/.codex/config.toml');
    changed++;
  } else {
    console.log('Codex Desktop: jbai-proxy provider already configured');
  }

  // Point model_provider at jbai-proxy.
  if (hasProviderSetting) {
    if (!usesJbaiProxy) {
      // Replace existing model_provider line
      content = content.replace(/^model_provider\s*=\s*"[^"]*"/m, 'model_provider = "jbai-proxy"');
      console.log('Codex Desktop: switched model_provider to jbai-proxy');
      changed++;
    } else {
      console.log('Codex Desktop: already using jbai-proxy provider');
    }
  } else {
    // Add model_provider after the model = line, or at the top of the file.
    if (/^model\s*=\s*"[^"]*"/m.test(content)) {
      content = content.replace(/^(model\s*=\s*"[^"]*")/m, '$1\nmodel_provider = "jbai-proxy"');
    } else {
      content = 'model_provider = "jbai-proxy"\n' + content;
    }
    console.log('Codex Desktop: set model_provider = "jbai-proxy"');
    changed++;
  }

  // Single write keeps all edits consistent.
  if (content !== original) {
    fs.writeFileSync(codexConfig, content);
  }
  return changed;
}
612
+
613
/**
 * Ensure the user's shell rc file exports JBAI_PROXY_KEY (a placeholder key
 * consumed by Codex Desktop / Cursor; real auth is the Grazie JWT).
 *
 * @returns {number} 1 if the rc file was modified, 0 if already configured.
 */
function configureShellEnv() {
  const home = os.homedir();
  const shell = process.env.SHELL || '/bin/zsh';

  // Pick the rc file matching the login shell.
  let rcFile;
  if (shell.includes('zsh')) {
    rcFile = path.join(home, '.zshrc');
  } else if (shell.includes('bash')) {
    // macOS login shells read .bash_profile; most Linux setups use .bashrc.
    const profile = path.join(home, '.bash_profile');
    rcFile = fs.existsSync(profile) ? profile : path.join(home, '.bashrc');
  } else {
    rcFile = path.join(home, '.profile');
  }

  if (!fs.existsSync(rcFile)) {
    fs.writeFileSync(rcFile, '');
  }

  if (fs.readFileSync(rcFile, 'utf-8').includes('JBAI_PROXY_KEY')) {
    console.log(`Shell: JBAI_PROXY_KEY already in ${path.basename(rcFile)}`);
    return 0;
  }

  fs.appendFileSync(
    rcFile,
    '\n# jbai-proxy: placeholder key for Codex Desktop / Cursor\nexport JBAI_PROXY_KEY="placeholder"\n'
  );
  console.log(`Shell: added JBAI_PROXY_KEY to ~/${path.basename(rcFile)}`);

  // Also set it in the current process for immediate use.
  process.env.JBAI_PROXY_KEY = 'placeholder';
  return 1;
}
647
+
648
/**
 * Make the proxy start automatically: launchd service on macOS, plain
 * background daemon elsewhere. Skips work when a proxy is already running
 * under an installed service.
 *
 * @param {number} port proxy port.
 * @returns {number} number of changes applied (0 if already running).
 */
function configureAutoStart(port) {
  if (process.platform !== 'darwin') {
    // Non-macOS: no launchd; just start a background daemon.
    startDaemon(port);
    return 1;
  }

  if (fs.existsSync(getLaunchdPlistPath())) {
    // Service already installed — only reinstall if the proxy isn't up.
    const info = readPid();
    if (info && isRunning(info.pid)) {
      console.log(`Proxy: already running on port ${info.port} (pid ${info.pid})`);
      return 0;
    }
  }
  // Install service (starts proxy + auto-start on login).
  installService(port);
  return 1;
}
668
+
669
/**
 * Probe the local proxy's /health endpoint and print a one-line verdict.
 * Fire-and-forget: failures are reported as "not responding", never thrown.
 *
 * Fix: removed the function-local `require('http')` that needlessly
 * shadowed the module-level `http` import.
 *
 * @param {number} port local proxy port to probe.
 */
function verifyProxy(port) {
  const req = http.get(`http://127.0.0.1:${port}/health`, { timeout: 2000 }, (res) => {
    let body = '';
    res.on('data', chunk => body += chunk);
    res.on('end', () => {
      try {
        const info = JSON.parse(body);
        if (info.status === 'ok' && info.tokenPresent) {
          console.log(`Proxy: running on port ${port}`);
        } else {
          // Up, but the health payload flags a token problem — show it raw.
          console.log(`Proxy: running but token issue — ${JSON.stringify(info)}`);
        }
      } catch {
        console.log(`Proxy: running on port ${port} (health check returned non-JSON)`);
      }
    });
  });
  req.on('error', () => {
    console.log('Proxy: not responding yet (may need a moment to start)');
  });
}
691
+
692
+ // ---------------------------------------------------------------------------
693
+ // Usage instructions
694
+ // ---------------------------------------------------------------------------
695
+
696
+ function printUsage(port) {
697
+ console.log(`Configure your tools to use these base URLs:
698
+
699
+ OpenAI tools (Codex Desktop, Cursor, etc.):
700
+ OPENAI_BASE_URL=http://localhost:${port}/openai/v1
701
+ OPENAI_API_KEY=placeholder
702
+
703
+ Anthropic tools (Claude Desktop, etc.):
704
+ ANTHROPIC_BASE_URL=http://localhost:${port}/anthropic
705
+ ANTHROPIC_API_KEY=placeholder
706
+
707
+ Auto-detect mode (works for most tools):
708
+ Base URL: http://localhost:${port}/v1
709
+ API Key: placeholder
710
+
711
+ The API key can be any non-empty string — auth is handled by your Grazie JWT.
712
+ `);
713
+ }
714
+
715
+ // ---------------------------------------------------------------------------
716
+ // CLI
717
+ // ---------------------------------------------------------------------------
718
+
719
+ function parseProxyArgs(argv) {
720
+ const opts = { port: DEFAULT_PORT, daemon: false, _daemon: false };
721
+ for (let i = 0; i < argv.length; i++) {
722
+ const arg = argv[i];
723
+ if (arg === '--port' || arg === '-p') {
724
+ opts.port = parseInt(argv[++i], 10) || DEFAULT_PORT;
725
+ } else if (arg === '--daemon' || arg === '-d') {
726
+ opts.daemon = true;
727
+ } else if (arg === '--_daemon') {
728
+ opts._daemon = true;
729
+ } else if (arg === 'stop') {
730
+ opts.stop = true;
731
+ } else if (arg === 'status') {
732
+ opts.status = true;
733
+ } else if (arg === 'setup') {
734
+ opts.setup = true;
735
+ } else if (arg === 'install-service') {
736
+ opts.installService = true;
737
+ } else if (arg === 'uninstall-service') {
738
+ opts.uninstallService = true;
739
+ } else if (arg === 'help' || arg === '--help' || arg === '-h') {
740
+ opts.help = true;
741
+ }
742
+ }
743
+ return opts;
744
+ }
745
+
746
// Help text for `jbai proxy help` (also exported for jbai.js to display).
const PROXY_HELP = `
jbai proxy - Local reverse proxy for JetBrains AI Platform

USAGE:
  jbai proxy setup              One-liner: configure everything + start proxy
  jbai proxy                    Start proxy (foreground, port ${DEFAULT_PORT})
  jbai proxy --port 9090        Start on custom port
  jbai proxy --daemon           Start in background
  jbai proxy stop               Stop background daemon
  jbai proxy status             Check proxy status
  jbai proxy install-service    Auto-start on login (macOS launchd)
  jbai proxy uninstall-service  Remove auto-start

ROUTES:
  /openai/v1/*          → Grazie OpenAI endpoint
  /anthropic/v1/*       → Grazie Anthropic endpoint
  /google/v1/*          → Grazie Google endpoint
  /v1/chat/completions  → OpenAI (auto)
  /v1/responses         → OpenAI (auto)
  /v1/messages          → Anthropic (auto)
  /v1/models            → Synthetic model list
  /health               → Proxy status

TOOL SETUP:
  Codex Desktop:  OPENAI_BASE_URL=http://localhost:${DEFAULT_PORT}/openai/v1
  Cursor:         Set base URL to http://localhost:${DEFAULT_PORT}/openai/v1
  Claude tools:   ANTHROPIC_BASE_URL=http://localhost:${DEFAULT_PORT}/anthropic
  API Key:        Use any non-empty string (e.g. "placeholder")
`;
775
+
776
+ // ---------------------------------------------------------------------------
777
+ // Main
778
+ // ---------------------------------------------------------------------------
779
+
780
+ function main() {
781
+ // When invoked directly (not via jbai.js), parse from argv[2]
782
+ // When invoked via jbai.js, args are passed from the caller
783
+ const args = process.argv.slice(2);
784
+ const opts = parseProxyArgs(args);
785
+
786
+ if (opts.help) {
787
+ console.log(PROXY_HELP);
788
+ return;
789
+ }
790
+
791
+ if (opts.stop) {
792
+ stopDaemon();
793
+ return;
794
+ }
795
+
796
+ if (opts.status) {
797
+ showStatus();
798
+ return;
799
+ }
800
+
801
+ if (opts.setup) {
802
+ setup(opts.port);
803
+ return;
804
+ }
805
+
806
+ if (opts.installService) {
807
+ installService(opts.port);
808
+ return;
809
+ }
810
+
811
+ if (opts.uninstallService) {
812
+ uninstallService();
813
+ return;
814
+ }
815
+
816
+ if (opts.daemon && !opts._daemon) {
817
+ startDaemon(opts.port);
818
+ return;
819
+ }
820
+
821
+ // Actual server startup
822
+ if (opts._daemon) {
823
+ logToFile = true;
824
+ }
825
+
826
+ const server = http.createServer(proxy);
827
+
828
+ server.listen(opts.port, '127.0.0.1', () => {
829
+ writePid(opts.port);
830
+ log(`jbai-proxy listening on http://127.0.0.1:${opts.port}`);
831
+ log(`Environment: ${config.getEnvironment()}`);
832
+ log(`Token file: ${config.TOKEN_FILE}`);
833
+
834
+ if (!opts._daemon) {
835
+ console.log(`\njbai-proxy running on http://localhost:${opts.port}\n`);
836
+ printUsage(opts.port);
837
+ console.log('Press Ctrl+C to stop.\n');
838
+ }
839
+ });
840
+
841
+ // Graceful shutdown
842
+ const shutdown = () => {
843
+ log('Shutting down...');
844
+ removePid();
845
+ server.close(() => process.exit(0));
846
+ setTimeout(() => process.exit(0), 2000);
847
+ };
848
+ process.on('SIGTERM', shutdown);
849
+ process.on('SIGINT', shutdown);
850
+ }
851
+
852
// Allow both direct execution and require() from jbai.js.
// NOTE(review): module.exports is only populated in the require() branch;
// when run directly the module exports nothing — confirm that is intended.
if (require.main === module) {
  main();
} else {
  module.exports = { main, parseProxyArgs, PROXY_HELP, stopDaemon, showStatus, startDaemon, DEFAULT_PORT };
}