free-coding-models 0.1.83 → 0.1.84
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -17
- package/bin/free-coding-models.js +297 -4754
- package/package.json +2 -2
- package/src/analysis.js +197 -0
- package/src/constants.js +116 -0
- package/src/favorites.js +98 -0
- package/src/key-handler.js +1005 -0
- package/src/openclaw.js +131 -0
- package/src/opencode.js +952 -0
- package/src/overlays.js +840 -0
- package/src/ping.js +186 -0
- package/src/provider-metadata.js +218 -0
- package/src/quota-capabilities.js +112 -0
- package/src/render-helpers.js +239 -0
- package/src/render-table.js +567 -0
- package/src/setup.js +105 -0
- package/src/telemetry.js +382 -0
- package/src/tier-colors.js +37 -0
- package/{lib → src}/token-stats.js +71 -3
- package/src/token-usage-reader.js +63 -0
- package/src/updater.js +237 -0
- package/{lib → src}/usage-reader.js +63 -21
- package/lib/quota-capabilities.js +0 -79
- /package/{lib → src}/account-manager.js +0 -0
- /package/{lib → src}/config.js +0 -0
- /package/{lib → src}/error-classifier.js +0 -0
- /package/{lib → src}/log-reader.js +0 -0
- /package/{lib → src}/model-merger.js +0 -0
- /package/{lib → src}/opencode-sync.js +0 -0
- /package/{lib → src}/provider-quota-fetchers.js +0 -0
- /package/{lib → src}/proxy-server.js +0 -0
- /package/{lib → src}/request-transformer.js +0 -0
- /package/{lib → src}/utils.js +0 -0
package/src/opencode.js
ADDED
|
@@ -0,0 +1,952 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @file opencode.js
|
|
3
|
+
* @description OpenCode integration and multi-account proxy lifecycle.
|
|
4
|
+
*
|
|
5
|
+
* @details
|
|
6
|
+
* This module owns all OpenCode-related behavior:
|
|
7
|
+
* - Configure opencode.json with selected models/providers
|
|
8
|
+
* - Launch OpenCode CLI or Desktop
|
|
9
|
+
* - Manage ZAI proxy bridge for non-standard API paths
|
|
10
|
+
* - Start/stop the multi-account proxy server (fcm-proxy)
|
|
11
|
+
* - Auto-start proxy when OpenCode config is already synced
|
|
12
|
+
*
|
|
13
|
+
* 🎯 Key features:
|
|
14
|
+
* - Provider-aware config setup for OpenCode (NIM, Groq, Cerebras, etc.)
|
|
15
|
+
* - ZAI proxy bridge to rewrite /v1/* → /api/coding/paas/v4/*
|
|
16
|
+
* - Auto-pick tmux port for OpenCode sub-agents
|
|
17
|
+
* - Multi-account proxy with rotation + auto-sync to opencode.json
|
|
18
|
+
*
|
|
19
|
+
* → Functions:
|
|
20
|
+
* - `setOpenCodeModelData` — Provide merged model lists for proxy topology
|
|
21
|
+
* - `startOpenCode` — Launch OpenCode CLI with selected model
|
|
22
|
+
* - `startOpenCodeDesktop` — Set model and open Desktop app
|
|
23
|
+
* - `startProxyAndLaunch` — Start fcm-proxy then launch OpenCode
|
|
24
|
+
* - `autoStartProxyIfSynced` — Auto-start proxy if opencode.json has fcm-proxy
|
|
25
|
+
* - `ensureProxyRunning` — Ensure proxy is running (start or reuse)
|
|
26
|
+
*
|
|
27
|
+
* @see src/opencode-sync.js — syncToOpenCode/load/save utilities
|
|
28
|
+
* @see src/proxy-server.js — ProxyServer implementation
|
|
29
|
+
*/
|
|
30
|
+
|
|
31
|
+
import chalk from 'chalk'
|
|
32
|
+
import { createServer } from 'net'
|
|
33
|
+
import { createServer as createHttpServer } from 'http'
|
|
34
|
+
import { request as httpsRequest } from 'https'
|
|
35
|
+
import { randomUUID } from 'crypto'
|
|
36
|
+
import { homedir } from 'os'
|
|
37
|
+
import { join } from 'path'
|
|
38
|
+
import { copyFileSync, existsSync } from 'fs'
|
|
39
|
+
import { sources } from '../sources.js'
|
|
40
|
+
import { resolveCloudflareUrl } from './ping.js'
|
|
41
|
+
import { ProxyServer } from './proxy-server.js'
|
|
42
|
+
import { loadOpenCodeConfig, saveOpenCodeConfig, syncToOpenCode } from './opencode-sync.js'
|
|
43
|
+
import { getApiKey, resolveApiKeys } from './config.js'
|
|
44
|
+
import { ENV_VAR_NAMES, OPENCODE_MODEL_MAP, isWindows, isMac, isLinux } from './provider-metadata.js'
|
|
45
|
+
import { setActiveProxy } from './render-table.js'
|
|
46
|
+
|
|
47
|
+
// 📖 OpenCode config location: ~/.config/opencode/opencode.json on ALL platforms.
// 📖 OpenCode uses xdg-basedir which resolves to %USERPROFILE%\.config on Windows.
const OPENCODE_CONFIG = join(homedir(), '.config', 'opencode', 'opencode.json')
// 📖 Port window scanned when picking a tmux sub-agent port (end is exclusive).
const OPENCODE_PORT_RANGE_START = 4096
const OPENCODE_PORT_RANGE_END = 5096

// 📖 Module-level proxy state — shared between startProxyAndLaunch and cleanup.
let activeProxy = null // current ProxyServer instance, or null when none is running
let proxyCleanedUp = false // guards cleanupProxy against double-stop
let exitHandlersRegistered = false // ensures SIGINT/SIGTERM/exit hooks attach only once
|
|
57
|
+
|
|
58
|
+
// 📖 Merged model references for proxy topology.
let mergedModelsRef = []
let mergedModelByLabelRef = new Map()

// 📖 setOpenCodeModelData: inject the merged model list and the label → merged
// 📖 model index this module uses to build the proxy topology.
// 📖 Anything that is not an Array / Map resets the slot to an empty collection.
export function setOpenCodeModelData(mergedModels, mergedModelByLabel) {
  if (Array.isArray(mergedModels)) {
    mergedModelsRef = mergedModels
  } else {
    mergedModelsRef = []
  }

  if (mergedModelByLabel instanceof Map) {
    mergedModelByLabelRef = mergedModelByLabel
  } else {
    mergedModelByLabelRef = new Map()
  }
}
|
|
67
|
+
|
|
68
|
+
// 📖 isTcpPortAvailable: resolve true when `port` can be bound locally, false otherwise.
// 📖 Used to avoid tmux sub-agent port conflicts when multiple projects run in parallel.
function isTcpPortAvailable(port) {
  return new Promise((resolve) => {
    const probe = createServer()
    probe.once('error', () => {
      // Bind failed (EADDRINUSE, EACCES, …) — report the port as taken.
      resolve(false)
    })
    probe.once('listening', () => probe.close(() => resolve(true)))
    probe.listen(port)
  })
}
|
|
80
|
+
|
|
81
|
+
// 📖 resolveOpenCodeTmuxPort: pick the port OpenCode should bind inside tmux.
// 📖 Priority:
// 📖   1) OPENCODE_PORT from env, when it parses to a valid and free port
// 📖   2) the first free port in the 4096-5095 scan window
// 📖 Returns { port, source: 'env' | 'auto' }, or null when nothing is free.
async function resolveOpenCodeTmuxPort() {
  const requested = Number.parseInt(process.env.OPENCODE_PORT || '', 10)
  const requestedIsValid = Number.isInteger(requested) && requested > 0 && requested <= 65535

  if (requestedIsValid) {
    if (await isTcpPortAvailable(requested)) {
      return { port: requested, source: 'env' }
    }
    // Valid but occupied — tell the user before falling back to the scan.
    console.log(chalk.yellow(` ⚠ OPENCODE_PORT=${requested} is already in use; selecting another port for this run.`))
  }

  for (let candidate = OPENCODE_PORT_RANGE_START; candidate < OPENCODE_PORT_RANGE_END; candidate++) {
    if (await isTcpPortAvailable(candidate)) {
      return { port: candidate, source: 'auto' }
    }
  }

  return null
}
|
|
104
|
+
|
|
105
|
+
// 📖 getOpenCodeConfigPath: single accessor for the opencode.json path
// 📖 (returns the module-level OPENCODE_CONFIG constant).
function getOpenCodeConfigPath() {
  return OPENCODE_CONFIG
}
|
|
108
|
+
|
|
109
|
+
// 📖 Map source model IDs to OpenCode built-in IDs when they differ.
// 📖 NVIDIA and ZAI ids simply drop their provider prefix; every other provider
// 📖 is looked up in OPENCODE_MODEL_MAP, falling back to the original id.
function getOpenCodeModelId(providerKey, modelId) {
  switch (providerKey) {
    case 'nvidia':
      return modelId.replace(/^nvidia\//, '')
    case 'zai':
      return modelId.replace(/^zai\//, '')
    default:
      return OPENCODE_MODEL_MAP[providerKey]?.[modelId] || modelId
  }
}
|
|
115
|
+
|
|
116
|
+
// ─── ZAI proxy bridge ─────────────────────────────────────────────────────────

// 📖 createZaiProxy: localhost reverse proxy bridging ZAI's non-standard API
// 📖 paths to OpenCode's expected /v1/* OpenAI-compatible format.
// 📖 Resolves with { server, port }; the caller owns the server and must close() it.
async function createZaiProxy(apiKey) {
  const ZAI_HOST = 'api.z.ai'
  const ZAI_BASE_PATH = '/api/coding/paas/v4'

  const server = createHttpServer((req, res) => {
    // 📖 Rewrite /v1/* → /api/coding/paas/v4/*
    let upstreamPath
    if (req.url.startsWith('/v1/')) {
      upstreamPath = `${ZAI_BASE_PATH}/${req.url.slice(4)}`
    } else if (req.url.startsWith('/v1')) {
      upstreamPath = ZAI_BASE_PATH + req.url.slice(3)
    } else {
      // 📖 Non /v1 paths (e.g. /api/v0/ health checks) — reject
      res.writeHead(404)
      res.end()
      return
    }

    const headers = { ...req.headers, host: ZAI_HOST }
    if (apiKey) headers.authorization = `Bearer ${apiKey}`
    // 📖 Remove transfer-encoding to avoid chunked encoding issues with https.request
    delete headers['transfer-encoding']

    const upstream = httpsRequest(
      {
        hostname: ZAI_HOST,
        port: 443,
        path: upstreamPath,
        method: req.method,
        headers,
      },
      (upstreamRes) => {
        res.writeHead(upstreamRes.statusCode, upstreamRes.headers)
        upstreamRes.pipe(res)
      }
    )
    upstream.on('error', () => {
      res.writeHead(502)
      res.end()
    })
    req.pipe(upstream)
  })

  await new Promise((resolve) => server.listen(0, '127.0.0.1', resolve))
  return { server, port: server.address().port }
}
|
|
155
|
+
|
|
156
|
+
// ─── Shared OpenCode spawn helper ─────────────────────────────────────────────

// 📖 spawnOpenCode: Resolve API keys + spawn opencode CLI with correct env.
// 📖 Params:
// 📖   args             — argv forwarded to the `opencode` binary (e.g. ['--model', ref])
// 📖   providerKey      — provider whose API key/env var gets injected into the child env
// 📖   fcmConfig        — FCM config consulted by getApiKey to resolve the key
// 📖   existingZaiProxy — an already-running ZAI bridge server to reuse (optional)
// 📖 Resolves with the child's exit code (1 when `opencode` is not on PATH);
// 📖 rejects on any other spawn error.
async function spawnOpenCode(args, providerKey, fcmConfig, existingZaiProxy = null) {
  const envVarName = ENV_VAR_NAMES[providerKey]
  const resolvedKey = getApiKey(fcmConfig, providerKey)
  const childEnv = { ...process.env }
  // 📖 Suppress MaxListenersExceededWarning from @modelcontextprotocol/sdk
  childEnv.NODE_NO_WARNINGS = '1'
  const finalArgs = [...args]
  const hasExplicitPortArg = finalArgs.includes('--port')
  if (envVarName && resolvedKey) childEnv[envVarName] = resolvedKey

  // 📖 ZAI proxy: OpenCode's Go binary doesn't know about ZAI as a provider.
  // 📖 Start proxy if needed, or reuse existing proxy if passed in.
  let zaiProxy = existingZaiProxy
  if (providerKey === 'zai' && resolvedKey && !zaiProxy) {
    const { server, port } = await createZaiProxy(resolvedKey)
    zaiProxy = server
    console.log(chalk.dim(` 🔀 ZAI proxy listening on port ${port} (rewrites /v1/* → ZAI API)`))
  }

  // 📖 In tmux, OpenCode sub-agents need a listening port to open extra panes.
  // 📖 Both the env var and an explicit --port flag are set for the child.
  if (process.env.TMUX && !hasExplicitPortArg) {
    const tmuxPort = await resolveOpenCodeTmuxPort()
    if (tmuxPort) {
      const portValue = String(tmuxPort.port)
      childEnv.OPENCODE_PORT = portValue
      finalArgs.push('--port', portValue)
      if (tmuxPort.source === 'env') {
        console.log(chalk.dim(` 📺 tmux detected — using OPENCODE_PORT=${portValue}.`))
      } else {
        console.log(chalk.dim(` 📺 tmux detected — using OpenCode port ${portValue} for sub-agent panes.`))
      }
    } else {
      console.log(chalk.yellow(` ⚠ tmux detected but no free OpenCode port found in ${OPENCODE_PORT_RANGE_START}-${OPENCODE_PORT_RANGE_END - 1}; launching without --port.`))
    }
  }

  // 📖 shell: true — presumably so Windows resolves opencode.cmd; confirm.
  // 📖 stdio inherited so the interactive TUI owns the terminal.
  const { spawn } = await import('child_process')
  const child = spawn('opencode', finalArgs, {
    stdio: 'inherit',
    shell: true,
    detached: false,
    env: childEnv
  })

  return new Promise((resolve, reject) => {
    child.on('exit', (code) => {
      if (zaiProxy) zaiProxy.close()
      // 📖 ZAI cleanup: remove the ephemeral proxy provider from opencode.json
      if (providerKey === 'zai') {
        try {
          const cfg = loadOpenCodeConfig()
          if (cfg.provider?.zai) delete cfg.provider.zai
          if (typeof cfg.model === 'string' && cfg.model.startsWith('zai/')) delete cfg.model
          saveOpenCodeConfig(cfg)
        } catch { /* best-effort cleanup */ }
      }
      resolve(code)
    })
    child.on('error', (err) => {
      if (zaiProxy) zaiProxy.close()
      // 📖 ENOENT → opencode binary missing; resolve 1 instead of rejecting so
      // 📖 callers get a friendly message rather than a stack trace.
      if (err.code === 'ENOENT') {
        console.error(chalk.red('\n X Could not find "opencode" -- is it installed and in your PATH?'))
        console.error(chalk.dim(' Install: npm i -g opencode or see https://opencode.ai'))
        resolve(1)
      } else {
        reject(err)
      }
    })
  })
}
|
|
229
|
+
|
|
230
|
+
// ─── Start OpenCode CLI ───────────────────────────────────────────────────────

// 📖 Declarative defaults for providers that speak the OpenAI-compatible API.
// 📖 Each entry is seeded into opencode.json as
// 📖   { npm: '@ai-sdk/openai-compatible', name, options: { baseURL, apiKey: '{env:VAR}' }, models: {} }
// 📖 Groq (built into OpenCode), Cloudflare (dynamic account URL), ZAI (local
// 📖 bridge) and Replicate (monitor-only) are handled separately in startOpenCode.
const OPENAI_COMPAT_PROVIDER_DEFAULTS = {
  nvidia: { name: 'NVIDIA NIM', baseURL: 'https://integrate.api.nvidia.com/v1', envVar: 'NVIDIA_API_KEY' },
  cerebras: { name: 'Cerebras', baseURL: 'https://api.cerebras.ai/v1', envVar: 'CEREBRAS_API_KEY' },
  sambanova: { name: 'SambaNova', baseURL: 'https://api.sambanova.ai/v1', envVar: 'SAMBANOVA_API_KEY' },
  openrouter: { name: 'OpenRouter', baseURL: 'https://openrouter.ai/api/v1', envVar: 'OPENROUTER_API_KEY' },
  huggingface: { name: 'Hugging Face Inference', baseURL: 'https://router.huggingface.co/v1', envVar: 'HUGGINGFACE_API_KEY' },
  deepinfra: { name: 'DeepInfra', baseURL: 'https://api.deepinfra.com/v1/openai', envVar: 'DEEPINFRA_API_KEY' },
  fireworks: { name: 'Fireworks AI', baseURL: 'https://api.fireworks.ai/inference/v1', envVar: 'FIREWORKS_API_KEY' },
  codestral: { name: 'Mistral Codestral', baseURL: 'https://codestral.mistral.ai/v1', envVar: 'CODESTRAL_API_KEY' },
  hyperbolic: { name: 'Hyperbolic', baseURL: 'https://api.hyperbolic.xyz/v1', envVar: 'HYPERBOLIC_API_KEY' },
  scaleway: { name: 'Scaleway', baseURL: 'https://api.scaleway.ai/v1', envVar: 'SCALEWAY_API_KEY' },
  googleai: { name: 'Google AI Studio', baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai', envVar: 'GOOGLE_API_KEY' },
  siliconflow: { name: 'SiliconFlow', baseURL: 'https://api.siliconflow.com/v1', envVar: 'SILICONFLOW_API_KEY' },
  together: { name: 'Together AI', baseURL: 'https://api.together.xyz/v1', envVar: 'TOGETHER_API_KEY' },
  perplexity: { name: 'Perplexity API', baseURL: 'https://api.perplexity.ai', envVar: 'PERPLEXITY_API_KEY' },
  iflow: { name: 'iFlow', baseURL: 'https://apis.iflow.cn/v1', envVar: 'IFLOW_API_KEY' },
}

// 📖 Build the opencode.json provider entry for one OpenAI-compatible provider.
function buildOpenAiCompatProvider({ name, baseURL, envVar }) {
  return {
    npm: '@ai-sdk/openai-compatible',
    name,
    options: { baseURL, apiKey: `{env:${envVar}}` },
    models: {}
  }
}

// 📖 Back up the current opencode.json (if any) before rewriting it.
function backupOpenCodeConfigFile() {
  if (existsSync(getOpenCodeConfigPath())) {
    const backupPath = `${getOpenCodeConfigPath()}.backup-${Date.now()}`
    copyFileSync(getOpenCodeConfigPath(), backupPath)
    console.log(chalk.dim(` Backup: ${backupPath}`))
  }
}

// 📖 Announce the selected model before mutating opencode.json.
function announceModelSelection(label, modelRef) {
  console.log(chalk.green(` Setting ${chalk.bold(label)} as default...`))
  console.log(chalk.dim(` Model: ${modelRef}`))
  console.log()
}

// 📖 Set config.model, persist, then re-read the file to verify the write stuck.
function saveAndVerifyDefaultModel(config, modelRef) {
  config.model = modelRef
  saveOpenCodeConfig(config)

  const savedConfig = loadOpenCodeConfig()
  console.log(chalk.dim(` Config saved to: ${getOpenCodeConfigPath()}`))
  console.log(chalk.dim(` Default model in config: ${savedConfig.model || 'NOT SET'}`))
  console.log()

  if (savedConfig.model === modelRef) {
    console.log(chalk.green(` Default model set to: ${modelRef}`))
  } else {
    console.log(chalk.yellow(` Config might not have been saved correctly`))
  }
  console.log()
  console.log(chalk.dim(' Starting OpenCode...'))
  console.log()
}

/**
 * 📖 startOpenCode: configure opencode.json for the selected model/provider and
 * launch the OpenCode CLI.
 *
 * Flow per provider class:
 *  - replicate: monitor-only — prints an explanation and returns without launching
 *  - zai: starts a local bridge proxy first, then writes an ephemeral provider
 *    entry pointing at it (spawnOpenCode cleans that entry up on exit)
 *  - everything else: seeds the provider entry (declarative defaults above,
 *    groq/cloudflare special-cased), registers the model, saves, launches.
 *
 * @param {object} model     selected model — { providerKey, modelId, label }
 * @param {object} fcmConfig FCM config used to resolve API keys
 */
export async function startOpenCode(model, fcmConfig) {
  const providerKey = model.providerKey ?? 'nvidia'
  const ocModelId = getOpenCodeModelId(providerKey, model.modelId)
  const modelRef = `${providerKey}/${ocModelId}`

  // 📖 Replicate uses /v1/predictions, not OpenAI chat-completions — monitor-only.
  if (providerKey === 'replicate') {
    console.log(chalk.yellow(' Replicate models are monitor-only for now in OpenCode mode.'))
    console.log(chalk.dim(' Reason: Replicate uses /v1/predictions instead of OpenAI chat-completions.'))
    console.log(chalk.dim(' You can still benchmark this model in the TUI and use other providers for OpenCode launch.'))
    console.log()
    return
  }

  // 📖 ZAI: bring up the bridge proxy before anything is written to config.
  if (providerKey === 'zai') {
    const resolvedKey = getApiKey(fcmConfig, providerKey)
    if (!resolvedKey) {
      console.log(chalk.yellow(' ZAI API key not found. Set ZAI_API_KEY environment variable.'))
      console.log()
      return
    }

    const { server: zaiProxyServer, port: zaiProxyPort } = await createZaiProxy(resolvedKey)
    console.log(chalk.dim(` ZAI proxy listening on port ${zaiProxyPort} (rewrites /v1/* -> ZAI API)`))

    announceModelSelection(model.label, modelRef)

    const config = loadOpenCodeConfig()
    backupOpenCodeConfigFile()

    if (!config.provider) config.provider = {}
    // 📖 Ephemeral entry: baseURL points at the local bridge, not api.z.ai.
    config.provider.zai = {
      npm: '@ai-sdk/openai-compatible',
      name: 'ZAI',
      options: {
        baseURL: `http://127.0.0.1:${zaiProxyPort}/v1`,
        apiKey: 'zai-proxy',
      },
      models: { [ocModelId]: { name: model.label } }
    }

    saveAndVerifyDefaultModel(config, modelRef)
    await spawnOpenCode(['--model', modelRef], providerKey, fcmConfig, zaiProxyServer)
    return
  }

  // 📖 Generic flow (includes NVIDIA): announce, back up, seed, save, launch.
  announceModelSelection(model.label, modelRef)

  const config = loadOpenCodeConfig()
  backupOpenCodeConfigFile()

  if (!config.provider) config.provider = {}
  if (!config.provider[providerKey]) {
    if (providerKey === 'groq') {
      // 📖 Groq is built into OpenCode: only the key + custom models are needed.
      config.provider.groq = { options: { apiKey: '{env:GROQ_API_KEY}' }, models: {} }
    } else if (providerKey === 'cloudflare') {
      // 📖 Cloudflare's baseURL embeds the account id, so it cannot be static.
      const cloudflareAccountId = (process.env.CLOUDFLARE_ACCOUNT_ID || '').trim()
      if (!cloudflareAccountId) {
        console.log(chalk.yellow(' Cloudflare Workers AI requires CLOUDFLARE_ACCOUNT_ID for OpenCode integration.'))
        console.log(chalk.dim(' Export CLOUDFLARE_ACCOUNT_ID and retry this selection.'))
        console.log()
        return
      }
      config.provider.cloudflare = {
        npm: '@ai-sdk/openai-compatible',
        name: 'Cloudflare Workers AI',
        options: { baseURL: `https://api.cloudflare.com/client/v4/accounts/${cloudflareAccountId}/ai/v1`, apiKey: '{env:CLOUDFLARE_API_TOKEN}' },
        models: {}
      }
    } else if (OPENAI_COMPAT_PROVIDER_DEFAULTS[providerKey]) {
      config.provider[providerKey] = buildOpenAiCompatProvider(OPENAI_COMPAT_PROVIDER_DEFAULTS[providerKey])
      if (providerKey === 'nvidia') {
        console.log(chalk.green(' + Auto-configured NVIDIA NIM provider in OpenCode'))
      }
    } else {
      // 📖 Unknown provider: seed a minimal entry instead of crashing below.
      config.provider[providerKey] = { models: {} }
    }
  }

  // 📖 Register the model under the provider unless OpenCode already ships it.
  // 📖 NVIDIA entries are always written (matches the previous behavior).
  const isBuiltinMapped = OPENCODE_MODEL_MAP[providerKey]?.[model.modelId]
  if (providerKey === 'nvidia' || !isBuiltinMapped) {
    if (!config.provider[providerKey].models) config.provider[providerKey].models = {}
    config.provider[providerKey].models[ocModelId] = { name: model.label }
  }

  saveAndVerifyDefaultModel(config, modelRef)
  await spawnOpenCode(['--model', modelRef], providerKey, fcmConfig)
}
|
|
508
|
+
|
|
509
|
+
// ─── Proxy lifecycle ─────────────────────────────────────────────────────────

// 📖 cleanupProxy: stop the active multi-account proxy exactly once and clear
// 📖 the shared module state. Safe to call repeatedly; stop() failures are
// 📖 swallowed because teardown is best-effort.
async function cleanupProxy() {
  if (proxyCleanedUp || !activeProxy) return
  proxyCleanedUp = true

  const stopping = activeProxy
  activeProxy = null
  setActiveProxy(null)

  try {
    await stopping.stop()
  } catch {
    // best-effort
  }
}
|
|
521
|
+
|
|
522
|
+
// 📖 registerExitHandlers: hook SIGINT/SIGTERM/exit once so the proxy is torn
// 📖 down when the process dies. Idempotent across calls.
function registerExitHandlers() {
  if (exitHandlersRegistered) return
  exitHandlersRegistered = true

  const teardown = () => {
    // Fire-and-forget: on 'exit' there is no opportunity to await the stop.
    cleanupProxy().catch(() => {})
  }
  for (const event of ['SIGINT', 'SIGTERM', 'exit']) {
    process.once(event, teardown)
  }
}
|
|
530
|
+
|
|
531
|
+
// 📖 buildProxyTopologyFromConfig: flatten the merged model list into the
// 📖 proxy's rotation accounts plus the model catalog exposed via opencode.json.
// 📖 Every resolved API key becomes one account per provider/model pair.
// 📖 Returns { accounts, proxyModels } — proxyModels maps slug → { name }.
export function buildProxyTopologyFromConfig(fcmConfig) {
  const accounts = []
  const proxyModels = {}

  for (const merged of mergedModelsRef) {
    proxyModels[merged.slug] = { name: merged.label }

    for (const providerEntry of merged.providers) {
      const keys = resolveApiKeys(fcmConfig, providerEntry.providerKey)
      const providerSource = sources[providerEntry.providerKey]
      if (!providerSource) continue

      // Strip the chat-completions suffix so the proxy appends its own paths.
      const baseUrl = resolveCloudflareUrl(providerSource.url).replace(/\/chat\/completions$/, '')

      keys.forEach((apiKey, keyIdx) =>
        accounts.push({
          id: `${providerEntry.providerKey}/${merged.slug}/${keyIdx}`,
          providerKey: providerEntry.providerKey,
          proxyModelId: merged.slug,
          modelId: providerEntry.modelId,
          url: baseUrl,
          apiKey,
        })
      )
    }
  }

  return { accounts, proxyModels }
}
|
|
561
|
+
|
|
562
|
+
// 📖 ensureProxyRunning: start the multi-account proxy, or reuse the live one.
// 📖 Pass { forceRestart: true } to tear down any existing instance first.
// 📖 Returns { port, accountCount, proxyToken, proxyModels, availableModelSlugs };
// 📖 proxyModels is null when an already-running proxy was reused.
// 📖 Throws when no API keys resolve to any proxy-capable account.
export async function ensureProxyRunning(fcmConfig, { forceRestart = false } = {}) {
  registerExitHandlers()
  proxyCleanedUp = false

  if (forceRestart && activeProxy) await cleanupProxy()

  // Reuse path: the proxy reports itself running, so surface its live state.
  const status = activeProxy?.getStatus?.()
  if (status?.running === true) {
    // NOTE(review): reaches into ProxyServer internals (_accounts, _proxyApiKey).
    const slugs = (activeProxy._accounts || [])
      .map((account) => account.proxyModelId)
      .filter(Boolean)
    return {
      port: status.port,
      accountCount: status.accountCount,
      proxyToken: activeProxy?._proxyApiKey,
      proxyModels: null,
      availableModelSlugs: new Set(slugs),
    }
  }

  // Fresh start: build the account topology and boot a new ProxyServer.
  const { accounts, proxyModels } = buildProxyTopologyFromConfig(fcmConfig)
  if (accounts.length === 0) {
    throw new Error('No API keys found for proxy-capable models')
  }

  const proxyToken = `fcm_${randomUUID().replace(/-/g, '')}`
  const proxy = new ProxyServer({ accounts, proxyApiKey: proxyToken })
  const { port } = await proxy.start()
  activeProxy = proxy
  setActiveProxy(activeProxy)

  return {
    port,
    accountCount: accounts.length,
    proxyToken,
    proxyModels,
    availableModelSlugs: new Set(accounts.map((account) => account.proxyModelId).filter(Boolean)),
  }
}
|
|
598
|
+
|
|
599
|
+
// 📖 autoStartProxyIfSynced: when opencode.json already carries an fcm-proxy
// 📖 provider, boot the proxy and re-sync the config so the port/token stay
// 📖 fresh. Progress and failures are reported via state.proxyStartupStatus.
export async function autoStartProxyIfSynced(fcmConfig, state) {
  try {
    const ocConfig = loadOpenCodeConfig()
    const alreadySynced = Boolean(ocConfig?.provider?.['fcm-proxy'])
    if (!alreadySynced) return

    state.proxyStartupStatus = { phase: 'starting' }

    const proxy = await ensureProxyRunning(fcmConfig)

    syncToOpenCode(fcmConfig, sources, mergedModelsRef, {
      proxyPort: proxy.port,
      proxyToken: proxy.proxyToken,
      availableModelSlugs: proxy.availableModelSlugs,
    })

    state.proxyStartupStatus = {
      phase: 'running',
      port: proxy.port,
      accountCount: proxy.accountCount,
    }
  } catch (err) {
    state.proxyStartupStatus = {
      phase: 'failed',
      reason: err?.message ?? String(err),
    }
  }
}
|
|
626
|
+
|
|
627
|
+
// 📖 startProxyAndLaunch: restart the multi-account proxy, then launch OpenCode
// 📖 pointed at it. On any failure the proxy is torn down and we fall back to
// 📖 the direct single-account startOpenCode flow.
export async function startProxyAndLaunch(model, fcmConfig) {
  try {
    const started = await ensureProxyRunning(fcmConfig, { forceRestart: true })

    // Prefer the merged slug for this label; fall back to the raw model id.
    const mergedEntry = mergedModelByLabelRef.get(model.label)
    const defaultProxyModelId = mergedEntry?.slug ?? model.modelId

    const catalogSize = Object.keys(started.proxyModels ?? {}).length
    if (catalogSize === 0) {
      throw new Error('Proxy model catalog is empty')
    }

    console.log(chalk.dim(` 🔀 Multi-account proxy listening on port ${started.port} (${started.accountCount} accounts)`))
    await startOpenCodeWithProxy(model, started.port, defaultProxyModelId, started.proxyModels, fcmConfig, started.proxyToken)
  } catch (err) {
    console.error(chalk.red(` ✗ Proxy failed to start: ${err.message}`))
    console.log(chalk.dim(' Falling back to direct single-account flow…'))
    await cleanupProxy()
    await startOpenCode(model, fcmConfig)
  }
}
|
|
646
|
+
|
|
647
|
+
// 📖 startOpenCodeWithProxy: point opencode.json at the local fcm-proxy, launch
// 📖 OpenCode, then restore the user's previous provider/model entries and stop
// 📖 the proxy — even when the launch throws (see the finally block).
// 📖 Params:
// 📖   model        — selected model (label used for logging)
// 📖   port         — local port the fcm-proxy is listening on
// 📖   proxyModelId — preferred catalog slug for the default model
// 📖   proxyModels  — slug → { name } catalog exposed by the proxy
// 📖   fcmConfig    — FCM config forwarded to spawnOpenCode
// 📖   proxyToken   — bearer token the proxy expects from OpenCode
async function startOpenCodeWithProxy(model, port, proxyModelId, proxyModels, fcmConfig, proxyToken) {
  const config = loadOpenCodeConfig()
  if (!config.provider) config.provider = {}
  // 📖 Snapshot prior state so the finally block can put it back after exit.
  const previousProxyProvider = config.provider['fcm-proxy']
  const previousModel = config.model

  // 📖 Fall back to the first catalog entry when the preferred slug is absent.
  const fallbackModelId = Object.keys(proxyModels)[0]
  const selectedProxyModelId = proxyModels[proxyModelId] ? proxyModelId : fallbackModelId

  config.provider['fcm-proxy'] = {
    npm: '@ai-sdk/openai-compatible',
    name: 'FCM Proxy',
    options: {
      baseURL: `http://127.0.0.1:${port}/v1`,
      apiKey: proxyToken
    },
    models: proxyModels
  }
  config.model = `fcm-proxy/${selectedProxyModelId}`
  saveOpenCodeConfig(config)

  console.log(chalk.green(` Setting ${chalk.bold(model.label)} via proxy as default for OpenCode…`))
  console.log(chalk.dim(` Model: fcm-proxy/${selectedProxyModelId} • Proxy: http://127.0.0.1:${port}/v1`))
  console.log(chalk.dim(` Catalog: ${Object.keys(proxyModels).length} models available via fcm-proxy`))
  console.log()

  try {
    await spawnOpenCode(['--model', `fcm-proxy/${selectedProxyModelId}`], 'fcm-proxy', fcmConfig)
  } finally {
    // 📖 Restore opencode.json: put back the previous fcm-proxy provider (or
    // 📖 remove ours), and the previous default model (or drop our reference).
    try {
      const savedCfg = loadOpenCodeConfig()
      if (!savedCfg.provider) savedCfg.provider = {}

      if (previousProxyProvider) {
        savedCfg.provider['fcm-proxy'] = previousProxyProvider
      } else if (savedCfg.provider['fcm-proxy']) {
        delete savedCfg.provider['fcm-proxy']
      }

      if (typeof previousModel === 'string' && previousModel.length > 0) {
        savedCfg.model = previousModel
      } else if (typeof savedCfg.model === 'string' && savedCfg.model.startsWith('fcm-proxy/')) {
        delete savedCfg.model
      }

      saveOpenCodeConfig(savedCfg)
    } catch { /* best-effort */ }
    await cleanupProxy()
  }
}
|
|
697
|
+
|
|
698
|
+
// ─── Start OpenCode Desktop ───────────────────────────────────────────────────
|
|
699
|
+
|
|
700
|
+
/**
 * Configure opencode.json for the selected model/provider and open the
 * OpenCode Desktop application.
 *
 * Flow:
 *   - nvidia: auto-configures the NVIDIA NIM provider, always registers the
 *     model entry, saves, verifies and launches.
 *   - replicate / zai: monitor-/CLI-only — prints an explanation and returns.
 *   - everything else: auto-configures the provider if missing (most are
 *     plain OpenAI-compatible endpoints driven by a lookup table; groq and
 *     cloudflare are special-cased), registers the model entry only when it
 *     is not already in OpenCode's builtin catalog, saves, verifies and
 *     launches.
 *
 * A timestamped backup of the existing opencode.json is written before any
 * mutation is persisted.
 *
 * @param {object} model     Selected model descriptor (`providerKey`, `modelId`, `label`).
 * @param {object} fcmConfig FCM configuration (kept for interface compatibility).
 * @returns {Promise<void>}
 */
export async function startOpenCodeDesktop(model, fcmConfig) {
  const providerKey = model.providerKey ?? 'nvidia'
  const ocModelId = getOpenCodeModelId(providerKey, model.modelId)
  const modelRef = `${providerKey}/${ocModelId}`

  // OpenAI-compatible providers that can be auto-configured from a simple
  // name / baseURL / env-var triple. groq (builtin provider), cloudflare
  // (account-scoped baseURL) and nvidia (dedicated flow) are handled apart.
  const OPENAI_COMPATIBLE = {
    cerebras: { name: 'Cerebras', baseURL: 'https://api.cerebras.ai/v1', envKey: 'CEREBRAS_API_KEY' },
    sambanova: { name: 'SambaNova', baseURL: 'https://api.sambanova.ai/v1', envKey: 'SAMBANOVA_API_KEY' },
    openrouter: { name: 'OpenRouter', baseURL: 'https://openrouter.ai/api/v1', envKey: 'OPENROUTER_API_KEY' },
    huggingface: { name: 'Hugging Face Inference', baseURL: 'https://router.huggingface.co/v1', envKey: 'HUGGINGFACE_API_KEY' },
    deepinfra: { name: 'DeepInfra', baseURL: 'https://api.deepinfra.com/v1/openai', envKey: 'DEEPINFRA_API_KEY' },
    fireworks: { name: 'Fireworks AI', baseURL: 'https://api.fireworks.ai/inference/v1', envKey: 'FIREWORKS_API_KEY' },
    codestral: { name: 'Mistral Codestral', baseURL: 'https://codestral.mistral.ai/v1', envKey: 'CODESTRAL_API_KEY' },
    hyperbolic: { name: 'Hyperbolic', baseURL: 'https://api.hyperbolic.xyz/v1', envKey: 'HYPERBOLIC_API_KEY' },
    scaleway: { name: 'Scaleway', baseURL: 'https://api.scaleway.ai/v1', envKey: 'SCALEWAY_API_KEY' },
    googleai: { name: 'Google AI Studio', baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai', envKey: 'GOOGLE_API_KEY' },
    siliconflow: { name: 'SiliconFlow', baseURL: 'https://api.siliconflow.com/v1', envKey: 'SILICONFLOW_API_KEY' },
    together: { name: 'Together AI', baseURL: 'https://api.together.xyz/v1', envKey: 'TOGETHER_API_KEY' },
    perplexity: { name: 'Perplexity API', baseURL: 'https://api.perplexity.ai', envKey: 'PERPLEXITY_API_KEY' },
    iflow: { name: 'iFlow', baseURL: 'https://apis.iflow.cn/v1', envKey: 'IFLOW_API_KEY' }
  }

  // Open the OpenCode Desktop app using the platform's launcher.
  const launchDesktop = async () => {
    const { exec } = await import('child_process')
    let command
    if (isMac) {
      command = 'open -a OpenCode'
    } else if (isWindows) {
      command = 'start "" "%LOCALAPPDATA%\\Programs\\OpenCode\\OpenCode.exe" 2>nul || start "" "%PROGRAMFILES%\\OpenCode\\OpenCode.exe" 2>nul || start OpenCode'
    } else if (isLinux) {
      command = `opencode-desktop --model ${modelRef} 2>/dev/null || flatpak run ai.opencode.OpenCode --model ${modelRef} 2>/dev/null || snap run opencode --model ${modelRef} 2>/dev/null || xdg-open /usr/share/applications/opencode.desktop 2>/dev/null || echo "OpenCode not found"`
    } else {
      // No launcher for this platform: exec(undefined) would throw a TypeError.
      console.error(chalk.red(' Could not open OpenCode Desktop'))
      console.error(chalk.dim(' Unsupported platform — download from https://opencode.ai'))
      return
    }
    exec(command, (err) => {
      if (err) {
        console.error(chalk.red(' Could not open OpenCode Desktop'))
        if (isWindows) {
          console.error(chalk.dim(' Make sure OpenCode is installed from https://opencode.ai'))
        } else if (isLinux) {
          console.error(chalk.dim(' Install via: snap install opencode OR flatpak install ai.opencode.OpenCode'))
          console.error(chalk.dim(' Or download from https://opencode.ai'))
        } else {
          console.error(chalk.dim(' Is it installed at /Applications/OpenCode.app?'))
        }
      }
    })
  }

  // Write a timestamped backup of opencode.json before mutating it.
  const backupConfigFile = () => {
    const backupPath = `${getOpenCodeConfigPath()}.backup-${Date.now()}`
    if (existsSync(getOpenCodeConfigPath())) {
      copyFileSync(getOpenCodeConfigPath(), backupPath)
      console.log(chalk.dim(` Backup: ${backupPath}`))
    }
  }

  // Persist the config, re-read it, and report whether the default stuck.
  const saveAndVerify = (config) => {
    saveOpenCodeConfig(config)
    const savedConfig = loadOpenCodeConfig()
    console.log(chalk.dim(` Config saved to: ${getOpenCodeConfigPath()}`))
    console.log(chalk.dim(` Default model in config: ${savedConfig.model || 'NOT SET'}`))
    console.log()
    if (savedConfig.model === config.model) {
      console.log(chalk.green(` Default model set to: ${modelRef}`))
    } else {
      console.log(chalk.yellow(` Config might not have been saved correctly`))
    }
    console.log()
    console.log(chalk.dim(' Opening OpenCode Desktop...'))
    console.log()
  }

  if (providerKey === 'nvidia') {
    const config = loadOpenCodeConfig()
    backupConfigFile()

    if (!config.provider) config.provider = {}
    if (!config.provider.nvidia) {
      config.provider.nvidia = {
        npm: '@ai-sdk/openai-compatible',
        name: 'NVIDIA NIM',
        options: {
          baseURL: 'https://integrate.api.nvidia.com/v1',
          apiKey: '{env:NVIDIA_API_KEY}'
        },
        models: {}
      }
      console.log(chalk.green(' + Auto-configured NVIDIA NIM provider in OpenCode'))
    }

    console.log(chalk.green(` Setting ${chalk.bold(model.label)} as default for OpenCode Desktop...`))
    console.log(chalk.dim(` Model: ${modelRef}`))
    console.log()

    config.model = modelRef
    // NVIDIA models are always registered explicitly (no builtin-catalog check).
    if (!config.provider.nvidia.models) config.provider.nvidia.models = {}
    config.provider.nvidia.models[ocModelId] = { name: model.label }

    saveAndVerify(config)
    await launchDesktop()
    return
  }

  if (providerKey === 'replicate') {
    console.log(chalk.yellow(' Replicate models are monitor-only for now in OpenCode Desktop mode.'))
    console.log(chalk.dim(' Reason: Replicate uses /v1/predictions instead of OpenAI chat-completions.'))
    console.log(chalk.dim(' You can still benchmark this model in the TUI and use other providers for Desktop launch.'))
    console.log()
    return
  }

  if (providerKey === 'zai') {
    console.log(chalk.yellow(' ZAI models are supported in OpenCode CLI mode only (not Desktop).'))
    console.log(chalk.dim(' Reason: ZAI requires a localhost proxy that only works with the CLI spawn.'))
    console.log(chalk.dim(' Use OpenCode CLI mode (default) to launch ZAI models.'))
    console.log()
    return
  }

  console.log(chalk.green(` Setting ${chalk.bold(model.label)} as default for OpenCode Desktop...`))
  console.log(chalk.dim(` Model: ${modelRef}`))
  console.log()

  const config = loadOpenCodeConfig()
  backupConfigFile()

  if (!config.provider) config.provider = {}
  if (!config.provider[providerKey]) {
    if (providerKey === 'groq') {
      // Groq ships as an OpenCode builtin; only the API key needs wiring.
      config.provider.groq = { options: { apiKey: '{env:GROQ_API_KEY}' }, models: {} }
    } else if (providerKey === 'cloudflare') {
      // Cloudflare's baseURL is account-scoped, so an account id is mandatory.
      const cloudflareAccountId = (process.env.CLOUDFLARE_ACCOUNT_ID || '').trim()
      if (!cloudflareAccountId) {
        console.log(chalk.yellow(' Cloudflare Workers AI requires CLOUDFLARE_ACCOUNT_ID for OpenCode integration.'))
        console.log(chalk.dim(' Export CLOUDFLARE_ACCOUNT_ID and retry this selection.'))
        console.log()
        return
      }
      config.provider.cloudflare = {
        npm: '@ai-sdk/openai-compatible',
        name: 'Cloudflare Workers AI',
        options: { baseURL: `https://api.cloudflare.com/client/v4/accounts/${cloudflareAccountId}/ai/v1`, apiKey: '{env:CLOUDFLARE_API_TOKEN}' },
        models: {}
      }
    } else if (OPENAI_COMPATIBLE[providerKey]) {
      const { name, baseURL, envKey } = OPENAI_COMPATIBLE[providerKey]
      config.provider[providerKey] = {
        npm: '@ai-sdk/openai-compatible',
        name,
        options: { baseURL, apiKey: `{env:${envKey}}` },
        models: {}
      }
    }
  }

  // Models already present in OpenCode's builtin catalog need no manual entry.
  const isBuiltinMapped = OPENCODE_MODEL_MAP[providerKey]?.[model.modelId]
  if (!isBuiltinMapped) {
    if (!config.provider[providerKey].models) config.provider[providerKey].models = {}
    config.provider[providerKey].models[ocModelId] = { name: model.label }
  }

  config.model = modelRef
  saveAndVerify(config)
  await launchDesktop()
}
|