jbai-cli 1.5.4 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +127 -5
- package/bin/jbai-opencode.js +5 -0
- package/bin/jbai-proxy.js +810 -0
- package/bin/jbai.js +13 -0
- package/lib/config.js +2 -13
- package/lib/interactive-handoff.js +27 -7
- package/lib/postinstall.js +23 -0
- package/package.json +2 -1
package/README.md
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
**Use AI coding tools with your JetBrains AI subscription** — no separate API keys needed.
|
|
4
4
|
|
|
5
|
-
One token, all tools: Claude Code, Codex, Aider, Gemini CLI, OpenCode
|
|
5
|
+
One token, all tools: Claude Code, Codex, Aider, Gemini CLI, OpenCode, **Codex Desktop, Cursor**.
|
|
6
6
|
|
|
7
7
|
## Install
|
|
8
8
|
|
|
@@ -41,7 +41,99 @@ Testing JetBrains AI Platform (staging)
|
|
|
41
41
|
4. Google Proxy (Gemini): ✅ Working
|
|
42
42
|
```
|
|
43
43
|
|
|
44
|
-
##
|
|
44
|
+
## Local Proxy (for Codex Desktop, Cursor, and other GUI tools)
|
|
45
|
+
|
|
46
|
+
jbai-cli includes a **local reverse proxy** that lets any tool with custom base URL support work through JetBrains AI Platform — no per-tool wrappers needed.
|
|
47
|
+
|
|
48
|
+
### One-liner setup
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
jbai proxy setup
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
This single command:
|
|
55
|
+
- Starts the proxy on `localhost:18080` (auto-starts on login via launchd)
|
|
56
|
+
- Configures **Codex Desktop** (`~/.codex/config.toml`)
|
|
57
|
+
- Adds the `JBAI_PROXY_KEY` env var to your shell
|
|
58
|
+
|
|
59
|
+
### How it works
|
|
60
|
+
|
|
61
|
+
```
|
|
62
|
+
Codex Desktop / Cursor / any tool
|
|
63
|
+
│ standard OpenAI / Anthropic API calls
|
|
64
|
+
▼
|
|
65
|
+
http://localhost:18080
|
|
66
|
+
│ injects Grazie-Authenticate-JWT header
|
|
67
|
+
│ routes to correct provider endpoint
|
|
68
|
+
▼
|
|
69
|
+
https://api.jetbrains.ai/user/v5/llm/{provider}/v1
|
|
70
|
+
│
|
|
71
|
+
▼
|
|
72
|
+
Actual LLM (GPT, Claude, Gemini)
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
### Codex Desktop
|
|
76
|
+
|
|
77
|
+
After `jbai proxy setup`, Codex Desktop works automatically. The setup configures `~/.codex/config.toml` with:
|
|
78
|
+
```toml
|
|
79
|
+
model_provider = "jbai-proxy"
|
|
80
|
+
|
|
81
|
+
[model_providers.jbai-proxy]
|
|
82
|
+
name = "JetBrains AI (Proxy)"
|
|
83
|
+
base_url = "http://localhost:18080/openai/v1"
|
|
84
|
+
env_key = "JBAI_PROXY_KEY"
|
|
85
|
+
wire_api = "responses"
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
### Cursor
|
|
89
|
+
|
|
90
|
+
Cursor requires manual configuration via its UI:
|
|
91
|
+
1. Open **Cursor** → **Settings** (gear icon) → **Models**
|
|
92
|
+
2. Enable **"Override OpenAI Base URL"**
|
|
93
|
+
3. Set:
|
|
94
|
+
- **Base URL**: `http://localhost:18080/openai/v1`
|
|
95
|
+
- **API Key**: `placeholder`
|
|
96
|
+
4. Click **Verify**
|
|
97
|
+
|
|
98
|
+
### Any OpenAI-compatible tool
|
|
99
|
+
|
|
100
|
+
Point it to the proxy:
|
|
101
|
+
```bash
|
|
102
|
+
export OPENAI_BASE_URL=http://localhost:18080/openai/v1
|
|
103
|
+
export OPENAI_API_KEY=placeholder
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
For Anthropic-compatible tools:
|
|
107
|
+
```bash
|
|
108
|
+
export ANTHROPIC_BASE_URL=http://localhost:18080/anthropic
|
|
109
|
+
export ANTHROPIC_API_KEY=placeholder
|
|
110
|
+
```
|
|
111
|
+
|
|
112
|
+
### Proxy commands
|
|
113
|
+
|
|
114
|
+
| Command | Description |
|
|
115
|
+
|---------|-------------|
|
|
116
|
+
| `jbai proxy setup` | One-liner: configure everything + start |
|
|
117
|
+
| `jbai proxy status` | Check if proxy is running |
|
|
118
|
+
| `jbai proxy stop` | Stop the proxy |
|
|
119
|
+
| `jbai proxy --daemon` | Start proxy in background |
|
|
120
|
+
| `jbai proxy install-service` | Auto-start on login (macOS launchd) |
|
|
121
|
+
| `jbai proxy uninstall-service` | Remove auto-start |
|
|
122
|
+
|
|
123
|
+
### Proxy routes
|
|
124
|
+
|
|
125
|
+
| Route | Target |
|
|
126
|
+
|-------|--------|
|
|
127
|
+
| `/openai/v1/*` | Grazie OpenAI endpoint |
|
|
128
|
+
| `/anthropic/v1/*` | Grazie Anthropic endpoint |
|
|
129
|
+
| `/google/v1/*` | Grazie Google endpoint |
|
|
130
|
+
| `/v1/chat/completions` | OpenAI (auto-detect) |
|
|
131
|
+
| `/v1/responses` | OpenAI (auto-detect) |
|
|
132
|
+
| `/v1/messages` | Anthropic (auto-detect) |
|
|
133
|
+
| `/v1/models` | Synthetic model list |
|
|
134
|
+
| `/health` | Proxy status |
|
|
135
|
+
|
|
136
|
+
## CLI Tool Wrappers
|
|
45
137
|
|
|
46
138
|
### Claude Code
|
|
47
139
|
```bash
|
|
@@ -76,6 +168,16 @@ jbai-opencode
|
|
|
76
168
|
jbai handoff --task "continue this work in orca-lab"
|
|
77
169
|
```
|
|
78
170
|
|
|
171
|
+
### Handoff to Orca Lab (nightly/staging)
|
|
172
|
+
```bash
|
|
173
|
+
export ORCA_LAB_URL="https://orca-lab-nightly.labs.jb.gg"
|
|
174
|
+
export FACADE_JWT_TOKEN="..." # required for /api/handoff
|
|
175
|
+
export GITHUB_TOKEN="..." # repo clone during provisioning
|
|
176
|
+
|
|
177
|
+
jbai handoff --task "add new e2e test" \
|
|
178
|
+
--repo "https://github.com/JetBrains/jcp-orca-facade.git"
|
|
179
|
+
```
|
|
180
|
+
|
|
79
181
|
### In-session handoff (interactive tools)
|
|
80
182
|
While running `jbai-codex`, `jbai-claude`, `jbai-gemini`, or `jbai-opencode`:
|
|
81
183
|
- Press `Ctrl+]` to trigger a handoff to Orca Lab.
|
|
@@ -83,7 +185,7 @@ While running `jbai-codex`, `jbai-claude`, `jbai-gemini`, or `jbai-opencode`:
|
|
|
83
185
|
|
|
84
186
|
Optional environment variables:
|
|
85
187
|
- `ORCA_LAB_URL` (default: `http://localhost:3000`)
|
|
86
|
-
- `FACADE_JWT_TOKEN` (
|
|
188
|
+
- `FACADE_JWT_TOKEN` (required for /api/handoff on hosted Orca Lab)
|
|
87
189
|
- `GITHUB_TOKEN` / `GH_TOKEN` (private repos)
|
|
88
190
|
- `JBAI_HANDOFF_TASK` (fallback task if no prompt captured)
|
|
89
191
|
- `JBAI_HANDOFF_REPO` (override repo URL)
|
|
@@ -116,8 +218,6 @@ jbai-aider --super
|
|
|
116
218
|
| Gemini CLI | `--yolo` |
|
|
117
219
|
| OpenCode | N/A (run mode is already non-interactive) |
|
|
118
220
|
|
|
119
|
-
⚠️ **Use with caution** - super mode allows the AI to make changes without confirmation.
|
|
120
|
-
|
|
121
221
|
## Using Different Models
|
|
122
222
|
|
|
123
223
|
Each tool has a sensible default, but you can specify any available model:
|
|
@@ -180,6 +280,9 @@ jbai-aider --model gemini/gemini-2.5-pro
|
|
|
180
280
|
| `jbai token set` | Set/update token |
|
|
181
281
|
| `jbai test` | Test API connections |
|
|
182
282
|
| `jbai models` | List all models |
|
|
283
|
+
| `jbai proxy setup` | Setup proxy + configure Codex Desktop |
|
|
284
|
+
| `jbai proxy status` | Check proxy status |
|
|
285
|
+
| `jbai proxy stop` | Stop proxy |
|
|
183
286
|
| `jbai handoff` | Continue a task in Orca Lab |
|
|
184
287
|
| `jbai install` | Install all AI tools |
|
|
185
288
|
| `jbai install claude` | Install specific tool |
|
|
@@ -247,6 +350,10 @@ jbai-cli uses JetBrains AI Platform's **Guarded Proxy**, which provides API-comp
|
|
|
247
350
|
|
|
248
351
|
Your JetBrains AI token authenticates all requests via the `Grazie-Authenticate-JWT` header.
|
|
249
352
|
|
|
353
|
+
**CLI wrappers** (`jbai-claude`, `jbai-codex`, etc.) set environment variables and launch the underlying tool directly.
|
|
354
|
+
|
|
355
|
+
**Local proxy** (`jbai proxy`) runs an HTTP server on localhost that forwards requests to Grazie, injecting the JWT header automatically. This enables GUI tools like Codex Desktop and Cursor that don't support custom headers but do support custom base URLs.
|
|
356
|
+
|
|
250
357
|
## Troubleshooting
|
|
251
358
|
|
|
252
359
|
### "Token expired"
|
|
@@ -269,6 +376,21 @@ jbai test
|
|
|
269
376
|
jbai token
|
|
270
377
|
```
|
|
271
378
|
|
|
379
|
+
### Proxy not working
|
|
380
|
+
```bash
|
|
381
|
+
# Check proxy status
|
|
382
|
+
jbai proxy status
|
|
383
|
+
|
|
384
|
+
# Check proxy health
|
|
385
|
+
curl http://localhost:18080/health
|
|
386
|
+
|
|
387
|
+
# Check logs
|
|
388
|
+
cat ~/.jbai/proxy.log
|
|
389
|
+
|
|
390
|
+
# Restart proxy
|
|
391
|
+
jbai proxy stop && jbai proxy --daemon
|
|
392
|
+
```
|
|
393
|
+
|
|
272
394
|
### Wrong environment
|
|
273
395
|
```bash
|
|
274
396
|
# Staging token won't work with production
|
package/bin/jbai-opencode.js
CHANGED
|
@@ -109,6 +109,11 @@ config.MODELS.claude.available.forEach(model => {
|
|
|
109
109
|
};
|
|
110
110
|
});
|
|
111
111
|
|
|
112
|
+
// NOTE: Gemini models are NOT available via Grazie OpenAI-compatible proxy.
|
|
113
|
+
// The /user/v5/llm/google/v1/vertex endpoint only works with native Google API
|
|
114
|
+
// format (used by jbai-gemini), not OpenAI chat/completions format.
|
|
115
|
+
// Use `jbai gemini` instead for Gemini models.
|
|
116
|
+
|
|
112
117
|
// Write config
|
|
113
118
|
fs.writeFileSync(configFile, JSON.stringify(opencodeConfig, null, 2));
|
|
114
119
|
|
|
@@ -0,0 +1,810 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* jbai-proxy - Local reverse proxy for JetBrains AI Platform (Grazie)
|
|
5
|
+
*
|
|
6
|
+
* Runs a local HTTP server that transparently proxies OpenAI / Anthropic / Google
|
|
7
|
+
* API calls to Grazie, injecting the JWT authentication header automatically.
|
|
8
|
+
*
|
|
9
|
+
* This allows ANY tool that supports custom base URLs (Codex Desktop, Cursor,
|
|
10
|
+
* Continue, etc.) to work through JetBrains AI Platform without per-tool wrappers.
|
|
11
|
+
*
|
|
12
|
+
* Routes:
|
|
13
|
+
* /openai/v1/* → Grazie OpenAI endpoint (explicit)
|
|
14
|
+
* /anthropic/v1/* → Grazie Anthropic endpoint (explicit)
|
|
15
|
+
* /google/v1/* → Grazie Google endpoint (explicit)
|
|
16
|
+
*
|
|
17
|
+
* /v1/chat/completions → OpenAI (auto-detect)
|
|
18
|
+
* /v1/completions → OpenAI (auto-detect)
|
|
19
|
+
* /v1/responses → OpenAI (auto-detect)
|
|
20
|
+
* /v1/embeddings → OpenAI (auto-detect)
|
|
21
|
+
* /v1/models → synthetic model list
|
|
22
|
+
* /v1/messages → Anthropic (auto-detect)
|
|
23
|
+
*
|
|
24
|
+
* Usage:
|
|
25
|
+
* jbai proxy # start on default port 18080
|
|
26
|
+
* jbai proxy --port 9090 # custom port
|
|
27
|
+
* jbai proxy --daemon # run in background
|
|
28
|
+
* jbai proxy stop # stop background daemon
|
|
29
|
+
* jbai proxy status # check if running
|
|
30
|
+
*/
|
|
31
|
+
|
|
32
|
+
const http = require('http');
|
|
33
|
+
const https = require('https');
|
|
34
|
+
const fs = require('fs');
|
|
35
|
+
const path = require('path');
|
|
36
|
+
const os = require('os');
|
|
37
|
+
const config = require('../lib/config');
|
|
38
|
+
|
|
39
|
+
const DEFAULT_PORT = 18080;
|
|
40
|
+
const PID_FILE = path.join(config.CONFIG_DIR, 'proxy.pid');
|
|
41
|
+
const LOG_FILE = path.join(config.CONFIG_DIR, 'proxy.log');
|
|
42
|
+
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
// Token management - re-reads from disk on each request for hot-refresh
|
|
45
|
+
// ---------------------------------------------------------------------------
|
|
46
|
+
let cachedToken = null;
|
|
47
|
+
let tokenMtime = 0;
|
|
48
|
+
|
|
49
|
+
/**
 * Read the Grazie token from `config.TOKEN_FILE`, caching it by file mtime
 * so the daemon picks up a refreshed token without a restart.
 *
 * @returns {string|null} The trimmed token, or null if the file is
 *   missing/unreadable.
 */
function getToken() {
  try {
    const stat = fs.statSync(config.TOKEN_FILE);
    if (stat.mtimeMs !== tokenMtime) {
      cachedToken = fs.readFileSync(config.TOKEN_FILE, 'utf-8').trim();
      tokenMtime = stat.mtimeMs;
    }
  } catch {
    // File missing/unreadable: drop the cache AND reset the mtime marker.
    // BUG FIX: the original left tokenMtime at its stale value, so a token
    // file restored with an identical mtime would never be re-read and
    // getToken() would keep returning null until the proxy restarted.
    cachedToken = null;
    tokenMtime = 0;
  }
  return cachedToken;
}
|
|
61
|
+
|
|
62
|
+
// ---------------------------------------------------------------------------
|
|
63
|
+
// Route resolution
|
|
64
|
+
// ---------------------------------------------------------------------------
|
|
65
|
+
|
|
66
|
+
/**
 * Map an incoming request path to a Grazie upstream target.
 *
 * @param {string} method  HTTP method (currently unused, kept for callers).
 * @param {string} urlPath Request path with the query string already removed.
 * @returns {{target: string|null, provider: string}|null} Upstream URL and
 *   provider tag; `{target: null, provider: 'models'}` for the synthetic
 *   model list; null when no route matches.
 */
function resolveRoute(method, urlPath) {
  const endpoints = config.getEndpoints();

  // Explicit provider prefixes: strip the prefix but keep the trailing
  // '/v1/...' part of the path.
  const explicitRoutes = [
    ['/openai/', endpoints.openai.replace(/\/v1$/, ''), 'openai'],
    ['/anthropic/', endpoints.anthropic.replace(/\/v1$/, ''), 'anthropic'],
    ['/google/', endpoints.google, 'google'],
  ];
  for (const [prefix, base, provider] of explicitRoutes) {
    if (urlPath.startsWith(prefix)) {
      // slice keeps the leading '/' of the remainder (e.g. '/v1/...').
      const remainder = urlPath.slice(prefix.length - 1);
      return { target: base + remainder, provider };
    }
  }

  // Auto-detect: Anthropic SDKs always call /v1/messages.
  if (urlPath.startsWith('/v1/messages')) {
    return { target: endpoints.anthropic + urlPath.slice(3), provider: 'anthropic' };
  }

  // Everything else under /v1/* is treated as OpenAI-compatible.
  if (urlPath.startsWith('/v1/')) {
    if (urlPath === '/v1/models') {
      // Served locally from a synthetic model list.
      return { target: null, provider: 'models' };
    }
    return { target: endpoints.openai + urlPath.slice(3), provider: 'openai' };
  }

  return null;
}
|
|
102
|
+
|
|
103
|
+
// ---------------------------------------------------------------------------
|
|
104
|
+
// Synthetic /v1/models response
|
|
105
|
+
// ---------------------------------------------------------------------------
|
|
106
|
+
|
|
107
|
+
/**
 * Build an OpenAI-style `/v1/models` payload from the model lists declared
 * in lib/config (openai + claude + gemini).
 *
 * @returns {{object: string, data: Array<object>}} OpenAI list envelope.
 */
function buildModelsResponse() {
  const created = Math.floor(Date.now() / 1000);

  const catalog = [
    ['openai', config.MODELS.openai.available],
    ['anthropic', config.MODELS.claude.available],
    ['google', config.MODELS.gemini.available],
  ];

  const data = catalog.flatMap(([owner, ids]) =>
    ids.map((id) => ({ id, object: 'model', created, owned_by: owner }))
  );

  return { object: 'list', data };
}
|
|
123
|
+
|
|
124
|
+
// ---------------------------------------------------------------------------
|
|
125
|
+
// Proxy handler
|
|
126
|
+
// ---------------------------------------------------------------------------
|
|
127
|
+
|
|
128
|
+
/**
 * Core request handler: route an incoming local request to the matching
 * Grazie endpoint, inject the Grazie-Authenticate-JWT header, and stream the
 * upstream response back (SSE-compatible).
 *
 * Handled locally (no upstream call): OPTIONS preflight, `/` and `/health`
 * info, the synthetic `/v1/models` list, and 401s for missing/expired tokens.
 *
 * @param {import('http').IncomingMessage} req
 * @param {import('http').ServerResponse} res
 */
function proxy(req, res) {
  const startTime = Date.now();

  // CORS preflight — answer locally so browser-based tools can call us.
  if (req.method === 'OPTIONS') {
    res.writeHead(204, {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
      'Access-Control-Allow-Headers': '*',
    });
    res.end();
    return;
  }

  // Parse URL (strip query string for routing, preserve it for forwarding)
  const [urlPath, query] = (req.url || '/').split('?');
  const route = resolveRoute(req.method, urlPath);

  if (!route) {
    // Health / info endpoint
    if (urlPath === '/' || urlPath === '/health') {
      const token = getToken();
      const info = {
        service: 'jbai-proxy',
        status: 'ok',
        environment: config.getEnvironment(),
        tokenPresent: !!token,
        tokenExpired: token ? config.isTokenExpired(token) : null,
        routes: {
          openai: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/openai/v1 OR /v1/chat/completions',
          anthropic: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/anthropic/v1 OR /v1/messages',
          google: 'http://localhost:' + (res.socket?.localPort || DEFAULT_PORT) + '/google/v1',
        }
      };
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(info, null, 2));
      return;
    }

    res.writeHead(404, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: { message: `Unknown route: ${urlPath}`, type: 'invalid_request_error' } }));
    return;
  }

  // Synthetic models endpoint — answered locally, no upstream call.
  if (route.provider === 'models') {
    res.writeHead(200, {
      'Content-Type': 'application/json',
      'Access-Control-Allow-Origin': '*',
    });
    res.end(JSON.stringify(buildModelsResponse()));
    log(`[models] GET /v1/models → 200 (${Date.now() - startTime}ms)`);
    return;
  }

  // A present, unexpired token is required before contacting Grazie.
  const token = getToken();
  if (!token) {
    res.writeHead(401, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: { message: 'No Grazie token found. Run: jbai token set', type: 'authentication_error' } }));
    return;
  }

  if (config.isTokenExpired(token)) {
    res.writeHead(401, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: { message: 'Grazie token expired. Run: jbai token set', type: 'authentication_error' } }));
    return;
  }

  // Buffer the request body, then forward it.
  const chunks = [];
  req.on('data', (chunk) => chunks.push(chunk));
  // BUG FIX: an aborted client upload emits 'error' on req; with no listener
  // Node throws the event and takes the whole proxy process down.
  req.on('error', (err) => {
    log(`[${route.provider}] ${req.method} ${urlPath} → client error: ${err.message}`);
  });
  req.on('end', () => {
    const body = Buffer.concat(chunks);
    const targetUrl = new URL(route.target + (query ? '?' + query : ''));

    // Build forwarded headers - pass through everything except host/authorization
    const fwdHeaders = {};
    for (const [key, value] of Object.entries(req.headers)) {
      const lower = key.toLowerCase();
      // Skip hop-by-hop and host headers
      if (['host', 'connection', 'keep-alive', 'transfer-encoding', 'te', 'trailer', 'upgrade'].includes(lower)) continue;
      // Skip authorization (we inject our own)
      if (lower === 'authorization') continue;
      fwdHeaders[key] = value;
    }

    // Inject Grazie auth
    fwdHeaders['Grazie-Authenticate-JWT'] = token;

    // Ensure content-length matches the body we actually buffered
    // (the client may have sent chunked transfer-encoding, which we strip).
    if (body.length > 0) {
      fwdHeaders['content-length'] = body.length;
    }

    const proxyReq = https.request({
      hostname: targetUrl.hostname,
      port: 443,
      path: targetUrl.pathname + targetUrl.search,
      method: req.method,
      headers: fwdHeaders,
    }, (proxyRes) => {
      // Forward status and headers
      const resHeaders = { ...proxyRes.headers, 'Access-Control-Allow-Origin': '*' };
      res.writeHead(proxyRes.statusCode, resHeaders);

      // Stream response (supports SSE streaming)
      proxyRes.pipe(res);

      proxyRes.on('end', () => {
        const elapsed = Date.now() - startTime;
        log(`[${route.provider}] ${req.method} ${urlPath} → ${proxyRes.statusCode} (${elapsed}ms)`);
      });
    });

    // BUG FIX: if the client disconnects mid-stream, tear down the upstream
    // request so we don't leak sockets or keep the LLM generating tokens.
    res.on('close', () => proxyReq.destroy());

    proxyReq.on('error', (err) => {
      log(`[${route.provider}] ${req.method} ${urlPath} → ERROR: ${err.message}`);
      if (!res.headersSent) {
        res.writeHead(502, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: { message: `Proxy error: ${err.message}`, type: 'proxy_error' } }));
      }
    });

    if (body.length > 0) {
      proxyReq.write(body);
    }
    proxyReq.end();
  });
}
|
|
257
|
+
|
|
258
|
+
// ---------------------------------------------------------------------------
|
|
259
|
+
// Logging
|
|
260
|
+
// ---------------------------------------------------------------------------
|
|
261
|
+
|
|
262
|
+
// When running as a daemon, output is redirected to LOG_FILE instead of
// stdout. Flipped by the daemon startup path.
let logToFile = false;

/**
 * Emit a timestamped log line — to stdout normally, or appended to LOG_FILE
 * when running in daemon mode.
 *
 * @param {string} msg Message to log (without timestamp).
 */
function log(msg) {
  const stamped = `${new Date().toISOString()} ${msg}`;
  if (!logToFile) {
    console.log(stamped);
    return;
  }
  fs.appendFileSync(LOG_FILE, stamped + '\n');
}
|
|
272
|
+
|
|
273
|
+
// ---------------------------------------------------------------------------
|
|
274
|
+
// Daemon management
|
|
275
|
+
// ---------------------------------------------------------------------------
|
|
276
|
+
|
|
277
|
+
/**
 * Record this process's pid and listen port in PID_FILE (owner-only 0600).
 *
 * @param {number} port Port the proxy is listening on.
 */
function writePid(port) {
  config.ensureConfigDir();
  const payload = JSON.stringify({ pid: process.pid, port });
  fs.writeFileSync(PID_FILE, payload, { mode: 0o600 });
}
|
|
281
|
+
|
|
282
|
+
/**
 * Read the daemon's pid file.
 *
 * @returns {{pid: number, port: number}|null} Parsed pid info, or null when
 *   the file is missing or unparseable.
 */
function readPid() {
  try {
    const raw = fs.readFileSync(PID_FILE, 'utf-8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
289
|
+
|
|
290
|
+
/**
 * Delete the daemon pid file; a missing file is not an error.
 */
function removePid() {
  try {
    fs.unlinkSync(PID_FILE);
  } catch {
    // Already gone — nothing to do.
  }
}
|
|
293
|
+
|
|
294
|
+
/**
 * Probe whether a process with the given pid exists, using signal 0
 * (checks existence/permission without actually signalling).
 *
 * @param {number} pid Process id to probe.
 * @returns {boolean} True if the process exists (and we may signal it).
 */
function isRunning(pid) {
  try {
    process.kill(pid, 0);
  } catch {
    return false;
  }
  return true;
}
|
|
302
|
+
|
|
303
|
+
/**
 * Stop the background proxy daemon recorded in the pid file.
 * Cleans up a stale pid file when the recorded process is gone.
 */
function stopDaemon() {
  const daemon = readPid();
  if (daemon === null) {
    console.log('No proxy daemon found');
    return;
  }

  if (!isRunning(daemon.pid)) {
    console.log('Proxy daemon not running (stale pid file)');
    removePid();
    return;
  }

  try {
    process.kill(daemon.pid, 'SIGTERM');
    console.log(`Stopped proxy daemon (pid ${daemon.pid})`);
    removePid();
  } catch (err) {
    console.log(`Failed to stop daemon: ${err.message}`);
  }
}
|
|
322
|
+
|
|
323
|
+
/**
 * Print the daemon's status and, when running, its local endpoints.
 * Removes a stale pid file as a side effect.
 */
function showStatus() {
  const daemon = readPid();
  if (daemon === null) {
    console.log('Proxy: not running');
    return;
  }

  if (!isRunning(daemon.pid)) {
    console.log('Proxy: not running (stale pid file)');
    removePid();
    return;
  }

  console.log(`Proxy: running on port ${daemon.port} (pid ${daemon.pid})`);
  console.log(`  OpenAI: http://localhost:${daemon.port}/openai/v1`);
  console.log(`  Anthropic: http://localhost:${daemon.port}/anthropic/v1`);
  console.log(`  Auto: http://localhost:${daemon.port}/v1/...`);
  console.log(`  Logs: ${LOG_FILE}`);
}
|
|
340
|
+
|
|
341
|
+
/**
 * Spawn a detached copy of this script in daemon mode and report whether it
 * came up (the child writes the pid file on successful startup).
 *
 * @param {number} port Port for the daemon to listen on.
 */
function startDaemon(port) {
  const { spawn } = require('child_process');

  const args = [__filename, '--port', String(port), '--_daemon'];
  const child = spawn(process.execPath, args, {
    detached: true,
    stdio: 'ignore',
    env: { ...process.env }
  });
  child.unref();

  // Check once after a short delay — by then the child should have written
  // its pid file if startup succeeded.
  setTimeout(() => {
    const info = readPid();
    if (info && isRunning(info.pid)) {
      console.log(`Proxy daemon started on port ${port} (pid ${info.pid})`);
      console.log('');
      printUsage(port);
      return;
    }
    console.log('Failed to start daemon. Check logs: ' + LOG_FILE);
  }, 500);
}
|
|
361
|
+
|
|
362
|
+
// ---------------------------------------------------------------------------
|
|
363
|
+
// macOS launchd service
|
|
364
|
+
// ---------------------------------------------------------------------------
|
|
365
|
+
|
|
366
|
+
// Reverse-DNS label identifying the per-user launchd agent.
const LAUNCHD_LABEL = 'com.jetbrains.jbai-proxy';

/**
 * Absolute path of the user's launchd plist for the proxy agent.
 *
 * @returns {string} ~/Library/LaunchAgents/com.jetbrains.jbai-proxy.plist
 */
function getLaunchdPlistPath() {
  const agentsDir = path.join(os.homedir(), 'Library', 'LaunchAgents');
  return path.join(agentsDir, `${LAUNCHD_LABEL}.plist`);
}
|
|
371
|
+
|
|
372
|
+
/**
 * Install a per-user launchd agent (macOS only) so the proxy auto-starts on
 * login: write the plist, then (re)load it via launchctl.
 *
 * @param {number} port Port baked into the agent's ProgramArguments.
 */
function installService(port) {
  if (process.platform !== 'darwin') {
    console.log('Auto-start service is only supported on macOS.');
    console.log('On Linux, create a systemd user service manually.');
    return;
  }

  const nodePath = process.execPath;
  const proxyPath = path.resolve(__filename);
  const home = os.homedir();
  const plistPath = getLaunchdPlistPath();

  const plist = `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>Label</key>
  <string>${LAUNCHD_LABEL}</string>
  <key>ProgramArguments</key>
  <array>
    <string>${nodePath}</string>
    <string>${proxyPath}</string>
    <string>--port</string>
    <string>${port}</string>
    <string>--_daemon</string>
  </array>
  <key>RunAtLoad</key>
  <true/>
  <key>KeepAlive</key>
  <true/>
  <key>StandardOutPath</key>
  <string>${home}/.jbai/proxy.log</string>
  <key>StandardErrorPath</key>
  <string>${home}/.jbai/proxy.log</string>
</dict>
</plist>`;

  // mkdirSync with recursive is a no-op when the directory already exists.
  fs.mkdirSync(path.join(home, 'Library', 'LaunchAgents'), { recursive: true });

  fs.writeFileSync(plistPath, plist);
  console.log(`Written: ${plistPath}`);

  const { execSync } = require('child_process');
  try {
    // Unload first in case an older copy is already loaded.
    try { execSync(`launchctl unload "${plistPath}" 2>/dev/null`); } catch {}
    execSync(`launchctl load "${plistPath}"`);
    console.log(`Service loaded and started on port ${port}`);
    console.log('');
    console.log('The proxy will now auto-start on login.');
    console.log(`  Logs: ${home}/.jbai/proxy.log`);
    console.log(`  Remove: jbai proxy uninstall-service`);
    console.log('');
    printUsage(port);
  } catch (e) {
    console.log(`Written plist but failed to load: ${e.message}`);
    console.log(`Try: launchctl load "${plistPath}"`);
  }
}
|
|
435
|
+
|
|
436
|
+
/**
 * Remove the macOS launchd agent: unload it (best-effort) and delete the
 * plist so the proxy no longer auto-starts.
 */
function uninstallService() {
  if (process.platform !== 'darwin') {
    console.log('Auto-start service is only supported on macOS.');
    return;
  }

  const plistPath = getLaunchdPlistPath();
  if (!fs.existsSync(plistPath)) {
    console.log('No launchd service installed.');
    return;
  }

  const { execSync } = require('child_process');
  try {
    execSync(`launchctl unload "${plistPath}"`);
  } catch {
    // Service may not be loaded; deleting the plist is what matters.
  }
  fs.unlinkSync(plistPath);
  console.log('Service uninstalled. Proxy will no longer auto-start.');
}
|
|
455
|
+
|
|
456
|
+
// ---------------------------------------------------------------------------
|
|
457
|
+
// One-liner setup: proxy + codex + shell env
|
|
458
|
+
// ---------------------------------------------------------------------------
|
|
459
|
+
|
|
460
|
+
/**
 * One-liner setup: validate the Grazie token, configure Codex Desktop,
 * export the placeholder env key, enable auto-start, then verify the proxy
 * answers. Exits with code 1 when no usable token is available.
 *
 * @param {number} port Port the proxy will listen on.
 */
function setup(port) {
  console.log('Setting up jbai-proxy...\n');

  // A present, unexpired token is a hard prerequisite for everything else.
  const token = config.getToken();
  if (!token) {
    console.log('No token found. Let\'s set one up first.\n');
    console.log(' 1. Go to https://platform.jetbrains.ai/ (or staging: https://platform.stgn.jetbrains.ai/)');
    console.log(' 2. Click Profile → "Copy Developer Token"');
    console.log(' 3. Run: jbai token set\n');
    console.log('Then re-run: jbai proxy setup');
    process.exit(1);
  }
  if (config.isTokenExpired(token)) {
    console.log('Your token is expired. Run: jbai token set');
    console.log('Then re-run: jbai proxy setup');
    process.exit(1);
  }

  console.log(`Token: valid`);
  console.log(`Environment: ${config.getEnvironment()}\n`);

  // Each configure step reports how many changes it actually made.
  let steps = 0;
  steps += configureCodexDesktop(port); // ~/.codex/config.toml
  steps += configureShellEnv();         // JBAI_PROXY_KEY in shell rc
  steps += configureAutoStart(port);    // launchd service / background daemon

  // Confirm the proxy is actually reachable before declaring success.
  console.log('');
  verifyProxy(port);

  console.log(`\n--- Setup complete (${steps} changes) ---\n`);
  printUsage(port);

  console.log(`Cursor (manual step):
  Open Cursor → Settings → Models → enable "Override OpenAI Base URL"
  Base URL: http://localhost:${port}/openai/v1
  API Key: placeholder
`);
}
|
|
507
|
+
|
|
508
|
+
/**
 * Ensure ~/.codex/config.toml declares the jbai-proxy model provider and
 * selects it via `model_provider = "jbai-proxy"`.
 *
 * All edits are made on an in-memory copy and written once at the end.
 * BUG FIX: the original appended the provider block to disk with
 * appendFileSync but kept operating on the stale in-memory `content`, then
 * rewrote the file from it with writeFileSync — wiping out the provider
 * block it had just appended (e.g. on a fresh, empty config).
 *
 * @param {number} port Local proxy port baked into base_url.
 * @returns {number} Number of changes made (0-2).
 */
function configureCodexDesktop(port) {
  const codexDir = path.join(os.homedir(), '.codex');
  const codexConfig = path.join(codexDir, 'config.toml');
  let changed = 0;

  fs.mkdirSync(codexDir, { recursive: true });

  let content = '';
  if (fs.existsSync(codexConfig)) {
    content = fs.readFileSync(codexConfig, 'utf-8');
  }

  // Add jbai-proxy provider if missing
  if (!content.includes('[model_providers.jbai-proxy]')) {
    const providerBlock = `
# JetBrains AI via local proxy (for Codex Desktop)
[model_providers.jbai-proxy]
name = "JetBrains AI (Proxy)"
base_url = "http://localhost:${port}/openai/v1"
env_key = "JBAI_PROXY_KEY"
wire_api = "responses"
`;
    content += providerBlock;
    console.log('Codex Desktop: added jbai-proxy provider to ~/.codex/config.toml');
    changed++;
  } else {
    console.log('Codex Desktop: jbai-proxy provider already configured');
  }

  // BUG FIX: match the `model_provider = ...` assignment specifically.
  // The original's includes('model_provider') also matched the
  // `[model_providers.jbai-proxy]` table header, sending it down the wrong
  // branch (and reporting a bogus "switched" change) once the provider
  // block existed.
  const assignmentRe = /^model_provider\s*=\s*"[^"]*"/m;
  if (assignmentRe.test(content)) {
    if (!/^model_provider\s*=\s*"jbai-proxy"/m.test(content)) {
      // Replace existing model_provider line
      content = content.replace(assignmentRe, 'model_provider = "jbai-proxy"');
      console.log('Codex Desktop: switched model_provider to jbai-proxy');
      changed++;
    } else {
      console.log('Codex Desktop: already using jbai-proxy provider');
    }
  } else {
    // Add model_provider after the model = line, or at the top of the file
    if (/^model\s*=\s*"[^"]*"/m.test(content)) {
      content = content.replace(/^(model\s*=\s*"[^"]*")/m, '$1\nmodel_provider = "jbai-proxy"');
    } else {
      content = 'model_provider = "jbai-proxy"\n' + content;
    }
    console.log('Codex Desktop: set model_provider = "jbai-proxy"');
    changed++;
  }

  // Single write of the final desired state (only when something changed).
  if (changed > 0) {
    fs.writeFileSync(codexConfig, content);
  }

  return changed;
}
|
|
565
|
+
|
|
566
|
+
/**
 * Ensure the user's shell rc file exports JBAI_PROXY_KEY so GUI tools
 * (Codex Desktop, Cursor) launched from a shell can authenticate with the
 * local proxy using a placeholder key.
 *
 * @returns {number} 1 if the rc file was modified, 0 if already configured.
 */
function configureShellEnv() {
  const homeDir = os.homedir();
  const userShell = process.env.SHELL || '/bin/zsh';

  // Pick the rc file that matches the login shell.
  let targetRc;
  if (userShell.includes('zsh')) {
    targetRc = path.join(homeDir, '.zshrc');
  } else if (userShell.includes('bash')) {
    // macOS uses .bash_profile, Linux uses .bashrc
    const bashProfile = path.join(homeDir, '.bash_profile');
    targetRc = fs.existsSync(bashProfile) ? bashProfile : path.join(homeDir, '.bashrc');
  } else {
    targetRc = path.join(homeDir, '.profile');
  }

  if (!fs.existsSync(targetRc)) {
    fs.writeFileSync(targetRc, '');
  }

  const existing = fs.readFileSync(targetRc, 'utf-8');
  if (existing.includes('JBAI_PROXY_KEY')) {
    console.log(`Shell: JBAI_PROXY_KEY already in ${path.basename(targetRc)}`);
    return 0;
  }

  fs.appendFileSync(
    targetRc,
    '\n# jbai-proxy: placeholder key for Codex Desktop / Cursor\nexport JBAI_PROXY_KEY="placeholder"\n'
  );
  console.log(`Shell: added JBAI_PROXY_KEY to ~/${path.basename(targetRc)}`);

  // Make the key visible to this process too, so follow-up setup steps work immediately.
  process.env.JBAI_PROXY_KEY = 'placeholder';
  return 1;
}
|
|
600
|
+
|
|
601
|
+
/**
 * Make sure the proxy is running now and (on macOS) auto-starts on login.
 *
 * @param {number} port - Port the proxy should listen on.
 * @returns {number} 1 if something was started/installed, 0 if already running.
 */
function configureAutoStart(port) {
  const info = readPid();
  const alreadyRunning = Boolean(info && isRunning(info.pid));

  if (process.platform === 'darwin') {
    // Only short-circuit when the launchd plist is installed AND the proxy
    // is up; if the plist is missing we still install it so the proxy
    // survives logout/reboot.
    if (fs.existsSync(getLaunchdPlistPath()) && alreadyRunning) {
      console.log(`Proxy: already running on port ${info.port} (pid ${info.pid})`);
      return 0;
    }
    // Install service (starts proxy + auto-start on login)
    installService(port);
    return 1;
  }

  // Non-macOS: no service-manager integration — just a background daemon.
  // FIX: previously this branch never checked for a running daemon, so
  // repeated `jbai proxy setup` runs on Linux spawned duplicates that then
  // fought over the port.
  if (alreadyRunning) {
    console.log(`Proxy: already running on port ${info.port} (pid ${info.pid})`);
    return 0;
  }
  startDaemon(port);
  return 1;
}
|
|
621
|
+
|
|
622
|
+
function verifyProxy(port) {
|
|
623
|
+
const http = require('http');
|
|
624
|
+
const req = http.get(`http://127.0.0.1:${port}/health`, { timeout: 2000 }, (res) => {
|
|
625
|
+
let body = '';
|
|
626
|
+
res.on('data', chunk => body += chunk);
|
|
627
|
+
res.on('end', () => {
|
|
628
|
+
try {
|
|
629
|
+
const info = JSON.parse(body);
|
|
630
|
+
if (info.status === 'ok' && info.tokenPresent) {
|
|
631
|
+
console.log(`Proxy: running on port ${port}`);
|
|
632
|
+
} else {
|
|
633
|
+
console.log(`Proxy: running but token issue — ${JSON.stringify(info)}`);
|
|
634
|
+
}
|
|
635
|
+
} catch {
|
|
636
|
+
console.log(`Proxy: running on port ${port} (health check returned non-JSON)`);
|
|
637
|
+
}
|
|
638
|
+
});
|
|
639
|
+
});
|
|
640
|
+
req.on('error', () => {
|
|
641
|
+
console.log('Proxy: not responding yet (may need a moment to start)');
|
|
642
|
+
});
|
|
643
|
+
}
|
|
644
|
+
|
|
645
|
+
// ---------------------------------------------------------------------------
|
|
646
|
+
// Usage instructions
|
|
647
|
+
// ---------------------------------------------------------------------------
|
|
648
|
+
|
|
649
|
+
/**
 * Print copy-pasteable base-URL / API-key settings for client tools.
 *
 * @param {number} port - Port the local proxy listens on.
 */
function printUsage(port) {
  const base = `http://localhost:${port}`;
  const lines = [
    'Configure your tools to use these base URLs:',
    '',
    '  OpenAI tools (Codex Desktop, Cursor, etc.):',
    `    OPENAI_BASE_URL=${base}/openai/v1`,
    '    OPENAI_API_KEY=placeholder',
    '',
    '  Anthropic tools (Claude Desktop, etc.):',
    `    ANTHROPIC_BASE_URL=${base}/anthropic`,
    '    ANTHROPIC_API_KEY=placeholder',
    '',
    '  Auto-detect mode (works for most tools):',
    `    Base URL: ${base}/v1`,
    '    API Key: placeholder',
    '',
    '  The API key can be any non-empty string — auth is handled by your Grazie JWT.',
    '',
  ];
  console.log(lines.join('\n'));
}
|
|
667
|
+
|
|
668
|
+
// ---------------------------------------------------------------------------
|
|
669
|
+
// CLI
|
|
670
|
+
// ---------------------------------------------------------------------------
|
|
671
|
+
|
|
672
|
+
/**
 * Parse CLI arguments for the `proxy` subcommand.
 * Unrecognized arguments are silently ignored (historical behavior).
 *
 * @param {string[]} argv - Arguments after the `proxy` command itself.
 * @returns {{port: number, daemon: boolean, _daemon: boolean,
 *            stop?: boolean, status?: boolean, setup?: boolean,
 *            installService?: boolean, uninstallService?: boolean,
 *            help?: boolean}}
 */
function parseProxyArgs(argv) {
  const opts = { port: DEFAULT_PORT, daemon: false, _daemon: false };
  for (let i = 0; i < argv.length; i++) {
    switch (argv[i]) {
      case '--port':
      case '-p':
        // Consume the next token as the port; fall back on NaN/missing value.
        opts.port = parseInt(argv[++i], 10) || DEFAULT_PORT;
        break;
      case '--daemon':
      case '-d':
        opts.daemon = true;
        break;
      case '--_daemon':
        // Internal flag used by the re-spawned daemon child.
        opts._daemon = true;
        break;
      case 'stop':
        opts.stop = true;
        break;
      case 'status':
        opts.status = true;
        break;
      case 'setup':
        opts.setup = true;
        break;
      case 'install-service':
        opts.installService = true;
        break;
      case 'uninstall-service':
        opts.uninstallService = true;
        break;
      case 'help':
      case '--help':
      case '-h':
        opts.help = true;
        break;
    }
  }
  return opts;
}
|
|
698
|
+
|
|
699
|
+
const PROXY_HELP = `
|
|
700
|
+
jbai proxy - Local reverse proxy for JetBrains AI Platform
|
|
701
|
+
|
|
702
|
+
USAGE:
|
|
703
|
+
jbai proxy setup One-liner: configure everything + start proxy
|
|
704
|
+
jbai proxy Start proxy (foreground, port ${DEFAULT_PORT})
|
|
705
|
+
jbai proxy --port 9090 Start on custom port
|
|
706
|
+
jbai proxy --daemon Start in background
|
|
707
|
+
jbai proxy stop Stop background daemon
|
|
708
|
+
jbai proxy status Check proxy status
|
|
709
|
+
jbai proxy install-service Auto-start on login (macOS launchd)
|
|
710
|
+
jbai proxy uninstall-service Remove auto-start
|
|
711
|
+
|
|
712
|
+
ROUTES:
|
|
713
|
+
/openai/v1/* → Grazie OpenAI endpoint
|
|
714
|
+
/anthropic/v1/* → Grazie Anthropic endpoint
|
|
715
|
+
/google/v1/* → Grazie Google endpoint
|
|
716
|
+
/v1/chat/completions → OpenAI (auto)
|
|
717
|
+
/v1/responses → OpenAI (auto)
|
|
718
|
+
/v1/messages → Anthropic (auto)
|
|
719
|
+
/v1/models → Synthetic model list
|
|
720
|
+
/health → Proxy status
|
|
721
|
+
|
|
722
|
+
TOOL SETUP:
|
|
723
|
+
Codex Desktop: OPENAI_BASE_URL=http://localhost:${DEFAULT_PORT}/openai/v1
|
|
724
|
+
Cursor: Set base URL to http://localhost:${DEFAULT_PORT}/openai/v1
|
|
725
|
+
Claude tools: ANTHROPIC_BASE_URL=http://localhost:${DEFAULT_PORT}/anthropic
|
|
726
|
+
API Key: Use any non-empty string (e.g. "placeholder")
|
|
727
|
+
`;
|
|
728
|
+
|
|
729
|
+
// ---------------------------------------------------------------------------
|
|
730
|
+
// Main
|
|
731
|
+
// ---------------------------------------------------------------------------
|
|
732
|
+
|
|
733
|
+
/**
 * Entry point for the proxy CLI.
 * Dispatches subcommands (help/stop/status/setup/service management),
 * handles daemonization, and otherwise starts the HTTP proxy in-process.
 */
function main() {
  // When invoked directly (not via jbai.js), parse from argv[2]
  // When invoked via jbai.js, args are passed from the caller
  const args = process.argv.slice(2);
  const opts = parseProxyArgs(args);

  if (opts.help) {
    console.log(PROXY_HELP);
    return;
  }

  if (opts.stop) {
    stopDaemon();
    return;
  }

  if (opts.status) {
    showStatus();
    return;
  }

  if (opts.setup) {
    setup(opts.port);
    return;
  }

  if (opts.installService) {
    installService(opts.port);
    return;
  }

  if (opts.uninstallService) {
    uninstallService();
    return;
  }

  // `--daemon` from the user re-spawns ourselves detached; the child carries
  // the internal `--_daemon` flag and falls through to the server below.
  if (opts.daemon && !opts._daemon) {
    startDaemon(opts.port);
    return;
  }

  // Actual server startup
  if (opts._daemon) {
    logToFile = true; // daemon mode: route log() output to the log file
  }

  const server = http.createServer(proxy);

  // FIX: without an 'error' listener, a busy port (EADDRINUSE) crashed the
  // process with an unhandled-exception stack trace instead of a message.
  server.on('error', (err) => {
    if (err.code === 'EADDRINUSE') {
      log(`Port ${opts.port} is already in use — is another proxy running? Try \`jbai proxy status\`.`);
    } else {
      log(`Failed to start proxy: ${err.message}`);
    }
    process.exit(1);
  });

  server.listen(opts.port, '127.0.0.1', () => {
    writePid(opts.port);
    log(`jbai-proxy listening on http://127.0.0.1:${opts.port}`);
    log(`Environment: ${config.getEnvironment()}`);
    log(`Token file: ${config.TOKEN_FILE}`);

    if (!opts._daemon) {
      console.log(`\njbai-proxy running on http://localhost:${opts.port}\n`);
      printUsage(opts.port);
      console.log('Press Ctrl+C to stop.\n');
    }
  });

  // Graceful shutdown
  const shutdown = () => {
    log('Shutting down...');
    removePid();
    server.close(() => process.exit(0));
    // Hard fallback in case open sockets keep server.close() from completing.
    setTimeout(() => process.exit(0), 2000);
  };
  process.on('SIGTERM', shutdown);
  process.on('SIGINT', shutdown);
}
|
|
804
|
+
|
|
805
|
+
// Allow both direct execution and require() from jbai.js
|
|
806
|
+
// Allow both direct execution and require() from jbai.js.
// FIX: export unconditionally so the module surface is identical either way
// (previously exports were only populated when require()d, never when run
// directly, which made the two modes subtly different).
module.exports = { main, parseProxyArgs, PROXY_HELP, stopDaemon, showStatus, startDaemon, DEFAULT_PORT };

if (require.main === module) {
  main();
}
|
package/bin/jbai.js
CHANGED
|
@@ -51,6 +51,12 @@ COMMANDS:
|
|
|
51
51
|
jbai doctor Check which tools are installed
|
|
52
52
|
jbai help Show this help
|
|
53
53
|
|
|
54
|
+
PROXY (for Codex Desktop, Cursor, etc.):
|
|
55
|
+
jbai proxy setup One-liner setup: proxy + Codex + shell env
|
|
56
|
+
jbai proxy --daemon Start proxy in background
|
|
57
|
+
jbai proxy stop Stop background proxy
|
|
58
|
+
jbai proxy status Check proxy status
|
|
59
|
+
|
|
54
60
|
TOOL WRAPPERS:
|
|
55
61
|
jbai-claude Launch Claude Code with JetBrains AI
|
|
56
62
|
jbai-codex Launch Codex CLI with JetBrains AI
|
|
@@ -586,6 +592,13 @@ switch (command) {
|
|
|
586
592
|
case 'status':
|
|
587
593
|
doctor();
|
|
588
594
|
break;
|
|
595
|
+
case 'proxy': {
|
|
596
|
+
const proxyMod = require('./jbai-proxy');
|
|
597
|
+
// Re-set argv so jbai-proxy sees the right args
|
|
598
|
+
process.argv = [process.argv[0], process.argv[1], ...args];
|
|
599
|
+
proxyMod.main();
|
|
600
|
+
break;
|
|
601
|
+
}
|
|
589
602
|
case 'help':
|
|
590
603
|
case '--help':
|
|
591
604
|
case '-h':
|
package/lib/config.js
CHANGED
|
@@ -77,21 +77,10 @@ const MODELS = {
|
|
|
77
77
|
// Codex CLI uses OpenAI models via the "responses" API (wire_api = "responses")
|
|
78
78
|
// Uses the same models as openai, just different API wire format
|
|
79
79
|
codex: {
|
|
80
|
-
default: '
|
|
80
|
+
default: 'gpt-5.2-2025-12-11',
|
|
81
81
|
available: [
|
|
82
|
-
// O-series models (best for coding tasks)
|
|
83
|
-
'o3-2025-04-16',
|
|
84
|
-
'o3-mini-2025-01-31',
|
|
85
|
-
'o4-mini-2025-04-16',
|
|
86
|
-
// GPT-5.x series
|
|
87
82
|
'gpt-5.2-2025-12-11',
|
|
88
|
-
'gpt-5.2'
|
|
89
|
-
'gpt-5.1-2025-11-13',
|
|
90
|
-
'gpt-5-2025-08-07',
|
|
91
|
-
// GPT-4.x series
|
|
92
|
-
'gpt-4.1-2025-04-14',
|
|
93
|
-
'gpt-4o-2024-11-20',
|
|
94
|
-
'gpt-4-turbo-2024-04-09'
|
|
83
|
+
'gpt-5.2'
|
|
95
84
|
]
|
|
96
85
|
},
|
|
97
86
|
gemini: {
|
|
@@ -54,13 +54,33 @@ function runWithHandoff({
|
|
|
54
54
|
|
|
55
55
|
process.stderr.write(`ℹ️ Handoff trigger: ${label}\n`);
|
|
56
56
|
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
57
|
+
let ptyProcess;
|
|
58
|
+
try {
|
|
59
|
+
ptyProcess = pty.spawn(command, args, {
|
|
60
|
+
name: 'xterm-256color',
|
|
61
|
+
cols: process.stdout.columns || 80,
|
|
62
|
+
rows: process.stdout.rows || 24,
|
|
63
|
+
cwd: process.cwd(),
|
|
64
|
+
env,
|
|
65
|
+
});
|
|
66
|
+
} catch (err) {
|
|
67
|
+
// node-pty throws synchronous errors (e.g., posix_spawnp failed) when executable not found
|
|
68
|
+
const error = new Error(err.message || 'Failed to spawn process');
|
|
69
|
+
// Check if this is likely a "command not found" error
|
|
70
|
+
const isNotFound = err.message && (
|
|
71
|
+
err.message.includes('posix_spawnp failed') ||
|
|
72
|
+
err.message.includes('ENOENT') ||
|
|
73
|
+
err.message.includes('not found')
|
|
74
|
+
);
|
|
75
|
+
error.code = isNotFound ? 'ENOENT' : 'SPAWN_ERROR';
|
|
76
|
+
error.command = command;
|
|
77
|
+
error.originalError = err;
|
|
78
|
+
// Return a minimal event emitter that immediately emits the error
|
|
79
|
+
const { EventEmitter } = require('events');
|
|
80
|
+
const fakeChild = new EventEmitter();
|
|
81
|
+
setImmediate(() => fakeChild.emit('error', error));
|
|
82
|
+
return fakeChild;
|
|
83
|
+
}
|
|
64
84
|
|
|
65
85
|
let lineBuffer = '';
|
|
66
86
|
let lastPrompt = '';
|
package/lib/postinstall.js
CHANGED
|
@@ -1,7 +1,30 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
3
5
|
const config = require('./config');
|
|
4
6
|
|
|
7
|
+
// Fix node-pty spawn-helper permissions (macOS/Linux).
// The prebuilt binary sometimes loses execute permissions during npm install.
try {
  // Prebuilds are keyed by `${platform}-${arch}` (e.g. darwin-arm64).
  // FIX: the previous `process.platform === 'darwin' ? 'darwin' : process.platform`
  // was a no-op ternary — use process.platform directly.
  const spawnHelperPath = path.join(
    __dirname,
    '..',
    'node_modules',
    'node-pty',
    'prebuilds',
    `${process.platform}-${process.arch}`,
    'spawn-helper'
  );
  if (fs.existsSync(spawnHelperPath)) {
    fs.chmodSync(spawnHelperPath, 0o755);
  }
} catch {
  // Ignore errors - this is a best-effort fix
}
|
|
27
|
+
|
|
5
28
|
console.log(`
|
|
6
29
|
╔══════════════════════════════════════════════════════════════╗
|
|
7
30
|
║ jbai-cli installed! ║
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "jbai-cli",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.7.0",
|
|
4
4
|
"description": "CLI wrappers to use AI coding tools (Claude Code, Codex, Gemini CLI, OpenCode) with JetBrains AI Platform",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"jetbrains",
|
|
@@ -26,6 +26,7 @@
|
|
|
26
26
|
"homepage": "https://github.com/JetBrains/jbai-cli#readme",
|
|
27
27
|
"bin": {
|
|
28
28
|
"jbai": "bin/jbai.js",
|
|
29
|
+
"jbai-proxy": "bin/jbai-proxy.js",
|
|
29
30
|
"jbai-claude": "bin/jbai-claude.js",
|
|
30
31
|
"jbai-codex": "bin/jbai-codex.js",
|
|
31
32
|
"jbai-gemini": "bin/jbai-gemini.js",
|