jbai-cli 1.9.1 → 1.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -2
- package/bin/jbai-proxy.js +23 -2
- package/lib/config.js +10 -2
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -236,7 +236,7 @@ Each tool has a sensible default, but you can specify any available model:
|
|
|
236
236
|
jbai-claude --model claude-opus-4-6
|
|
237
237
|
|
|
238
238
|
# Codex with GPT-5.3
|
|
239
|
-
jbai-codex --model gpt-5.3-codex
|
|
239
|
+
jbai-codex --model gpt-5.3-codex
|
|
240
240
|
|
|
241
241
|
# Goose with GPT-5.2
|
|
242
242
|
jbai-goose run -t "your task" --provider openai --model gpt-5.2-2025-12-11
|
|
@@ -275,7 +275,8 @@ jbai-continue # select model in TUI
|
|
|
275
275
|
**Codex (OpenAI Responses)** - Use with Codex CLI: `jbai-codex --model <model>`
|
|
276
276
|
| Model | Notes |
|
|
277
277
|
|-------|-------|
|
|
278
|
-
| `gpt-5.3-codex
|
|
278
|
+
| `gpt-5.3-codex` | Default, latest |
|
|
279
|
+
| `gpt-5.3-codex-api-preview` | Alias |
|
|
279
280
|
| `gpt-5.2-codex` | Coding-optimized |
|
|
280
281
|
| `gpt-5.2-pro-2025-12-11` | |
|
|
281
282
|
| `gpt-5.1-codex` | |
|
package/bin/jbai-proxy.js
CHANGED
|
@@ -68,6 +68,10 @@ function resolveRoute(method, urlPath) {
|
|
|
68
68
|
|
|
69
69
|
// Explicit provider prefix routes
|
|
70
70
|
if (urlPath.startsWith('/openai/')) {
|
|
71
|
+
// Intercept /openai/v1/models → return synthetic list (Grazie doesn't list codex models)
|
|
72
|
+
if (urlPath === '/openai/v1/models') {
|
|
73
|
+
return { target: null, provider: 'models' };
|
|
74
|
+
}
|
|
71
75
|
const rest = urlPath.slice('/openai'.length); // keeps /v1/...
|
|
72
76
|
return { target: endpoints.openai.replace(/\/v1$/, '') + rest, provider: 'openai' };
|
|
73
77
|
}
|
|
@@ -132,7 +136,8 @@ function buildModelsResponse() {
|
|
|
132
136
|
// Codex CLI model picker response (matches chatgpt.com/backend-api/codex/models format)
|
|
133
137
|
function buildCodexModelsResponse() {
|
|
134
138
|
const descriptions = {
|
|
135
|
-
'gpt-5.3-codex
|
|
139
|
+
'gpt-5.3-codex': 'Latest GPT-5.3 Codex model. Designed for long-running, detailed software engineering tasks.',
|
|
140
|
+
'gpt-5.3-codex-api-preview': 'GPT-5.3 Codex (api-preview alias).',
|
|
136
141
|
'gpt-5.2-codex': 'Latest frontier agentic coding model.',
|
|
137
142
|
'gpt-5.2-pro-2025-12-11': 'GPT-5.2 Pro for deep reasoning and complex tasks.',
|
|
138
143
|
'gpt-5.2-2025-12-11': 'Latest frontier model with improvements across knowledge, reasoning and coding.',
|
|
@@ -245,7 +250,23 @@ function proxy(req, res) {
|
|
|
245
250
|
const chunks = [];
|
|
246
251
|
req.on('data', (chunk) => chunks.push(chunk));
|
|
247
252
|
req.on('end', () => {
|
|
248
|
-
|
|
253
|
+
let body = Buffer.concat(chunks);
|
|
254
|
+
|
|
255
|
+
// Rewrite model aliases so Grazie accepts the request
|
|
256
|
+
if (body.length > 0 && (req.headers['content-type'] || '').includes('application/json')) {
|
|
257
|
+
try {
|
|
258
|
+
const parsed = JSON.parse(body.toString('utf-8'));
|
|
259
|
+
if (parsed.model && config.MODEL_ALIASES[parsed.model]) {
|
|
260
|
+
const original = parsed.model;
|
|
261
|
+
parsed.model = config.MODEL_ALIASES[parsed.model];
|
|
262
|
+
body = Buffer.from(JSON.stringify(parsed), 'utf-8');
|
|
263
|
+
log(`[alias] Rewrote model "${original}" → "${parsed.model}"`);
|
|
264
|
+
}
|
|
265
|
+
} catch {
|
|
266
|
+
// Not valid JSON or parse error — forward as-is
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
|
|
249
270
|
const targetUrl = new URL(route.target + (query ? '?' + query : ''));
|
|
250
271
|
|
|
251
272
|
// Build forwarded headers - pass through everything except host/authorization
|
package/lib/config.js
CHANGED
|
@@ -53,7 +53,7 @@ const MODELS = {
|
|
|
53
53
|
default: 'gpt-5.2-2025-12-11',
|
|
54
54
|
available: [
|
|
55
55
|
// GPT-5.x series (latest) - require date-versioned names
|
|
56
|
-
// NOTE: gpt-5.3-codex-api-preview
|
|
56
|
+
// NOTE: gpt-5.3-codex / gpt-5.3-codex-api-preview are Responses-API-only → use via jbai-codex
|
|
57
57
|
'gpt-5.2-2025-12-11',
|
|
58
58
|
'gpt-5.2',
|
|
59
59
|
'gpt-5.1-2025-11-13',
|
|
@@ -81,9 +81,10 @@ const MODELS = {
|
|
|
81
81
|
// Codex CLI uses OpenAI models via the "responses" API (wire_api = "responses")
|
|
82
82
|
// Includes chat-capable models PLUS codex-only models (responses API only)
|
|
83
83
|
codex: {
|
|
84
|
-
default: 'gpt-5.3-codex
|
|
84
|
+
default: 'gpt-5.3-codex',
|
|
85
85
|
available: [
|
|
86
86
|
// Codex-specific models (responses API only, NOT available via chat/completions)
|
|
87
|
+
'gpt-5.3-codex',
|
|
87
88
|
'gpt-5.3-codex-api-preview',
|
|
88
89
|
// GPT-5.x chat models (also work via responses API)
|
|
89
90
|
'gpt-5.2-2025-12-11',
|
|
@@ -121,6 +122,12 @@ const MODELS = {
|
|
|
121
122
|
// They are not supported by CLI tools that use OpenAI API format.
|
|
122
123
|
};
|
|
123
124
|
|
|
125
|
+
// Model aliases: new Codex CLI sends short names that Grazie doesn't recognise yet.
|
|
126
|
+
// Map them to the Grazie-accepted equivalents so the proxy can rewrite on the fly.
|
|
127
|
+
const MODEL_ALIASES = {
|
|
128
|
+
'gpt-5.3-codex': 'gpt-5.3-codex-api-preview',
|
|
129
|
+
};
|
|
130
|
+
|
|
124
131
|
// All models for tools that support multiple providers (OpenCode, Codex)
|
|
125
132
|
const ALL_MODELS = {
|
|
126
133
|
openai: MODELS.openai.available,
|
|
@@ -315,6 +322,7 @@ module.exports = {
|
|
|
315
322
|
CONFIG_FILE,
|
|
316
323
|
ENDPOINTS,
|
|
317
324
|
MODELS,
|
|
325
|
+
MODEL_ALIASES,
|
|
318
326
|
ALL_MODELS,
|
|
319
327
|
TOOLS,
|
|
320
328
|
ensureConfigDir,
|
package/package.json
CHANGED