free-coding-models 0.1.67 → 0.1.68

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,7 @@
11
11
  *
12
12
  * 🎯 Key features:
13
13
  * - Parallel pings across all models with animated real-time updates (multi-provider)
14
- * - Continuous monitoring with 2-second ping intervals (never stops)
14
+ * - Continuous monitoring with 60-second ping intervals (never stops)
15
15
  * - Rolling averages calculated from ALL successful pings since start
16
16
  * - Best-per-tier highlighting with medals (🥇🥈🥉)
17
17
  * - Interactive navigation with arrow keys directly in the table
@@ -60,13 +60,14 @@
60
60
  * ⚙️ Configuration:
61
61
  * - API keys stored per-provider in ~/.free-coding-models.json (0600 perms)
62
62
  * - Old ~/.free-coding-models plain-text auto-migrated as nvidia key on first run
63
- * - Env vars override config: NVIDIA_API_KEY, GROQ_API_KEY, CEREBRAS_API_KEY, OPENROUTER_API_KEY, HUGGINGFACE_API_KEY/HF_TOKEN, REPLICATE_API_TOKEN, DEEPINFRA_API_KEY/DEEPINFRA_TOKEN, FIREWORKS_API_KEY, SILICONFLOW_API_KEY, TOGETHER_API_KEY, PERPLEXITY_API_KEY, etc.
63
+ * - Env vars override config: NVIDIA_API_KEY, GROQ_API_KEY, CEREBRAS_API_KEY, OPENROUTER_API_KEY, HUGGINGFACE_API_KEY/HF_TOKEN, REPLICATE_API_TOKEN, DEEPINFRA_API_KEY/DEEPINFRA_TOKEN, FIREWORKS_API_KEY, SILICONFLOW_API_KEY, TOGETHER_API_KEY, PERPLEXITY_API_KEY, ZAI_API_KEY, etc.
64
+ * - ZAI (z.ai) uses a non-standard base path; cloudflare needs CLOUDFLARE_ACCOUNT_ID in env.
64
65
  * - Cloudflare Workers AI requires both CLOUDFLARE_API_TOKEN (or CLOUDFLARE_API_KEY) and CLOUDFLARE_ACCOUNT_ID
65
66
  * - Models loaded from sources.js — all provider/model definitions are centralized there
66
67
  * - OpenCode config: ~/.config/opencode/opencode.json
67
68
  * - OpenClaw config: ~/.openclaw/openclaw.json
68
69
  * - Ping timeout: 15s per attempt
69
- * - Ping interval: 2 seconds (continuous monitoring mode)
70
+ * - Ping interval: 60 seconds (continuous monitoring mode)
70
71
  * - Animation: 12 FPS with braille spinners
71
72
  *
72
73
  * 🚀 CLI flags:
@@ -91,10 +92,12 @@ import { randomUUID } from 'crypto'
91
92
  import { homedir } from 'os'
92
93
  import { join, dirname } from 'path'
93
94
  import { createServer } from 'net'
95
+ import { createServer as createHttpServer } from 'http'
96
+ import { request as httpsRequest } from 'https'
94
97
  import { MODELS, sources } from '../sources.js'
95
98
  import { patchOpenClawModelsJson } from '../patch-openclaw-models.js'
96
- import { getAvg, getVerdict, getUptime, getP95, getJitter, getStabilityScore, sortResults, filterByTier, findBestModel, parseArgs, TIER_ORDER, VERDICT_ORDER, TIER_LETTER_MAP } from '../lib/utils.js'
97
- import { loadConfig, saveConfig, getApiKey, isProviderEnabled } from '../lib/config.js'
99
+ import { getAvg, getVerdict, getUptime, getP95, getJitter, getStabilityScore, sortResults, filterByTier, findBestModel, parseArgs, TIER_ORDER, VERDICT_ORDER, TIER_LETTER_MAP, scoreModelForTask, getTopRecommendations, TASK_TYPES, PRIORITY_TYPES, CONTEXT_BUDGETS } from '../lib/utils.js'
100
+ import { loadConfig, saveConfig, getApiKey, isProviderEnabled, saveAsProfile, loadProfile, listProfiles, deleteProfile, getActiveProfileName, setActiveProfile, _emptyProfileSettings } from '../lib/config.js'
98
101
 
99
102
  const require = createRequire(import.meta.url)
100
103
  const readline = require('readline')
@@ -717,7 +720,7 @@ const ALT_HOME = '\x1b[H'
717
720
  // 📖 This allows easy addition of new model sources beyond NVIDIA NIM
718
721
 
719
722
  const PING_TIMEOUT = 15_000 // 📖 15s per attempt before abort - slow models get more time
720
- const PING_INTERVAL = 3_000 // 📖 Ping all models every 3 seconds in continuous mode
723
+ const PING_INTERVAL = 60_000 // 📖 60s between pings avoids provider rate-limit bans
721
724
 
722
725
  const FPS = 12
723
726
  const COL_MODEL = 22
@@ -760,6 +763,7 @@ const spinCell = (f, o = 0) => chalk.dim.yellow(FRAMES[(f + o) % FRAMES.length].
760
763
  // 📖 from the main table and from each other.
761
764
  const SETTINGS_OVERLAY_BG = chalk.bgRgb(14, 20, 30)
762
765
  const HELP_OVERLAY_BG = chalk.bgRgb(24, 16, 32)
766
+ const RECOMMEND_OVERLAY_BG = chalk.bgRgb(10, 25, 15) // 📖 Green tint for Smart Recommend
763
767
  const OVERLAY_PANEL_WIDTH = 116
764
768
 
765
769
  // 📖 Strip ANSI color/control sequences to estimate visible text width before padding.
@@ -852,7 +856,7 @@ function sliceOverlayLines(lines, offset, terminalRows) {
852
856
  // 📖 Keep these constants in sync with renderTable() fixed shell lines.
853
857
  // 📖 If this drifts, model rows overflow and can push the title row out of view.
854
858
  const TABLE_HEADER_LINES = 4 // 📖 title, spacer, column headers, separator
855
- const TABLE_FOOTER_LINES = 6 // 📖 spacer, hints, spacer, credit+contributors, discord, spacer
859
+ const TABLE_FOOTER_LINES = 7 // 📖 spacer, hints line 1, hints line 2, spacer, credit+contributors, discord, spacer
856
860
  const TABLE_FIXED_LINES = TABLE_HEADER_LINES + TABLE_FOOTER_LINES
857
861
 
858
862
  // 📖 Computes the visible slice of model rows that fits in the terminal.
@@ -871,18 +875,27 @@ function calculateViewport(terminalRows, scrollOffset, totalModels) {
871
875
  return { startIdx: scrollOffset, endIdx, hasAbove, hasBelow }
872
876
  }
873
877
 
874
- // 📖 Favorites are always pinned at the top and keep insertion order.
875
- // 📖 Non-favorites still use the active sort column/direction.
878
+ // 📖 Recommended models are pinned above favorites, favorites above non-favorites.
879
+ // 📖 Recommended: sorted by recommendation score (highest first).
880
+ // 📖 Favorites: keep insertion order (favoriteRank).
881
+ // 📖 Non-favorites: active sort column/direction.
876
882
  function sortResultsWithPinnedFavorites(results, sortColumn, sortDirection) {
883
+ const recommendedRows = results
884
+ .filter((r) => r.isRecommended && !r.isFavorite)
885
+ .sort((a, b) => (b.recommendScore || 0) - (a.recommendScore || 0))
877
886
  const favoriteRows = results
878
- .filter((r) => r.isFavorite)
887
+ .filter((r) => r.isFavorite && !r.isRecommended)
879
888
  .sort((a, b) => a.favoriteRank - b.favoriteRank)
880
- const nonFavoriteRows = sortResults(results.filter((r) => !r.isFavorite), sortColumn, sortDirection)
881
- return [...favoriteRows, ...nonFavoriteRows]
889
+ // 📖 Models that are both recommended AND favorite — show in recommended section
890
+ const bothRows = results
891
+ .filter((r) => r.isRecommended && r.isFavorite)
892
+ .sort((a, b) => (b.recommendScore || 0) - (a.recommendScore || 0))
893
+ const nonSpecialRows = sortResults(results.filter((r) => !r.isFavorite && !r.isRecommended), sortColumn, sortDirection)
894
+ return [...bothRows, ...recommendedRows, ...favoriteRows, ...nonSpecialRows]
882
895
  }
883
896
 
884
897
  // 📖 renderTable: mode param controls footer hint text (opencode vs openclaw)
885
- function renderTable(results, pendingPings, frame, cursor = null, sortColumn = 'avg', sortDirection = 'asc', pingInterval = PING_INTERVAL, lastPingTime = Date.now(), mode = 'opencode', tierFilterMode = 0, scrollOffset = 0, terminalRows = 0, originFilterMode = 0) {
898
+ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = 'avg', sortDirection = 'asc', pingInterval = PING_INTERVAL, lastPingTime = Date.now(), mode = 'opencode', tierFilterMode = 0, scrollOffset = 0, terminalRows = 0, originFilterMode = 0, activeProfile = null, profileSaveMode = false, profileSaveBuffer = '') {
886
899
  // 📖 Filter out hidden models for display
887
900
  const visibleResults = results.filter(r => !r.hidden)
888
901
 
@@ -934,6 +947,12 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
934
947
  }
935
948
  }
936
949
 
950
+ // 📖 Profile badge — shown when a named profile is active (Shift+P to cycle, Shift+S to save)
951
+ let profileBadge = ''
952
+ if (activeProfile) {
953
+ profileBadge = chalk.bold.rgb(200, 150, 255)(` [📋 ${activeProfile}]`)
954
+ }
955
+
937
956
  // 📖 Column widths (generous spacing with margins)
938
957
  const W_RANK = 6
939
958
  const W_TIER = 6
@@ -952,7 +971,7 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
952
971
  const sorted = sortResultsWithPinnedFavorites(visibleResults, sortColumn, sortDirection)
953
972
 
954
973
  const lines = [
955
- ` ${chalk.bold('⚡ Free Coding Models')} ${chalk.dim('v' + LOCAL_VERSION)}${modeBadge}${modeHint}${tierBadge}${originBadge} ` +
974
+ ` ${chalk.bold('⚡ Free Coding Models')} ${chalk.dim('v' + LOCAL_VERSION)}${modeBadge}${modeHint}${tierBadge}${originBadge}${profileBadge} ` +
956
975
  chalk.greenBright(`✅ ${up}`) + chalk.dim(' up ') +
957
976
  chalk.yellow(`⏳ ${timeout}`) + chalk.dim(' timeout ') +
958
977
  chalk.red(`❌ ${down}`) + chalk.dim(' down ') +
@@ -1054,8 +1073,8 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
1054
1073
  const providerName = sources[r.providerKey]?.name ?? r.providerKey ?? 'NIM'
1055
1074
  const source = chalk.green(providerName.padEnd(W_SOURCE))
1056
1075
  // 📖 Favorites: always reserve 2 display columns at the start of Model column.
1057
- // 📖 ⭐ (2 cols) for favorites, ' ' (2 spaces) for non-favorites — keeps alignment stable.
1058
- const favoritePrefix = r.isFavorite ? '⭐' : ' '
1076
+ // 📖 🎯 (2 cols) for recommended, ⭐ (2 cols) for favorites, ' ' (2 spaces) for non-favorites — keeps alignment stable.
1077
+ const favoritePrefix = r.isRecommended ? '🎯' : r.isFavorite ? '⭐' : ' '
1059
1078
  const prefixDisplayWidth = 2
1060
1079
  const nameWidth = Math.max(0, W_MODEL - prefixDisplayWidth)
1061
1080
  const name = favoritePrefix + r.label.slice(0, nameWidth).padEnd(nameWidth)
@@ -1241,6 +1260,9 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
1241
1260
 
1242
1261
  if (isCursor) {
1243
1262
  lines.push(chalk.bgRgb(50, 0, 60)(row))
1263
+ } else if (r.isRecommended) {
1264
+ // 📖 Medium green background for recommended models (distinguishable from favorites)
1265
+ lines.push(chalk.bgRgb(15, 40, 15)(row))
1244
1266
  } else if (r.isFavorite) {
1245
1267
  lines.push(chalk.bgRgb(35, 20, 0)(row))
1246
1268
  } else {
@@ -1252,7 +1274,12 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
1252
1274
  lines.push(chalk.dim(` ... ${sorted.length - vp.endIdx} more below ...`))
1253
1275
  }
1254
1276
 
1255
- lines.push('')
1277
+ // 📖 Profile save inline prompt — shown when Shift+S is pressed, replaces spacer line
1278
+ if (profileSaveMode) {
1279
+ lines.push(chalk.bgRgb(40, 20, 60)(` 📋 Save profile as: ${chalk.cyanBright(profileSaveBuffer + '▏')} ${chalk.dim('Enter save • Esc cancel')}`))
1280
+ } else {
1281
+ lines.push('')
1282
+ }
1256
1283
  const intervalSec = Math.round(pingInterval / 1000)
1257
1284
 
1258
1285
  // 📖 Footer hints adapt based on active mode
@@ -1261,7 +1288,10 @@ function renderTable(results, pendingPings, frame, cursor = null, sortColumn = '
1261
1288
  : mode === 'opencode-desktop'
1262
1289
  ? chalk.rgb(0, 200, 255)('Enter→OpenDesktop')
1263
1290
  : chalk.rgb(0, 200, 255)('Enter→OpenCode')
1264
- lines.push(chalk.dim(` ↑↓ Navigate • `) + actionHint + chalk.dim(` • F Favorite • R/Y/O/M/L/A/S/C/H/V/B/U Sort • T Tier • N Origin • W↓/X↑ (${intervalSec}s) • `) + chalk.rgb(255, 100, 50).bold('Z Mode') + chalk.dim(` • `) + chalk.yellow('P') + chalk.dim(` Settings • `) + chalk.rgb(0, 255, 80).bold('K Help'))
1291
+ // 📖 Line 1: core navigation + sorting shortcuts
1292
+ lines.push(chalk.dim(` ↑↓ Navigate • `) + actionHint + chalk.dim(` • `) + chalk.yellow('F') + chalk.dim(` Favorite • R/Y/O/M/L/A/S/C/H/V/B/U Sort • `) + chalk.yellow('T') + chalk.dim(` Tier • `) + chalk.yellow('N') + chalk.dim(` Origin • W↓/X↑ (${intervalSec}s) • `) + chalk.rgb(255, 100, 50).bold('Z') + chalk.dim(` Mode • `) + chalk.yellow('P') + chalk.dim(` Settings • `) + chalk.rgb(0, 255, 80).bold('K') + chalk.dim(` Help`))
1293
+ // 📖 Line 2: profiles, recommend, and extended hints — gives visibility to less-obvious features
1294
+ lines.push(chalk.dim(` `) + chalk.rgb(200, 150, 255).bold('⇧P') + chalk.dim(` Cycle profile • `) + chalk.rgb(200, 150, 255).bold('⇧S') + chalk.dim(` Save profile • `) + chalk.rgb(0, 200, 180).bold('Q') + chalk.dim(` Smart Recommend • `) + chalk.yellow('E') + chalk.dim(`/`) + chalk.yellow('D') + chalk.dim(` Tier ↑↓ • `) + chalk.yellow('Esc') + chalk.dim(` Close overlay • Ctrl+C Exit`))
1265
1295
  lines.push('')
1266
1296
  lines.push(
1267
1297
  chalk.rgb(255, 150, 200)(' Made with 💖 & ☕ by \x1b]8;;https://github.com/vava-nessa\x1b\\vava-nessa\x1b]8;;\x1b\\') +
@@ -1306,6 +1336,9 @@ function resolveCloudflareUrl(url) {
1306
1336
  }
1307
1337
 
1308
1338
  function buildPingRequest(apiKey, modelId, providerKey, url) {
1339
+ // 📖 ZAI models are stored as "zai/glm-..." in sources.js but the API expects just "glm-..."
1340
+ const apiModelId = providerKey === 'zai' ? modelId.replace(/^zai\//, '') : modelId
1341
+
1309
1342
  if (providerKey === 'replicate') {
1310
1343
  // 📖 Replicate uses /v1/predictions with a different payload than OpenAI chat-completions.
1311
1344
  const replicateHeaders = { 'Content-Type': 'application/json', Prefer: 'wait=4' }
@@ -1324,7 +1357,7 @@ function buildPingRequest(apiKey, modelId, providerKey, url) {
1324
1357
  return {
1325
1358
  url: resolveCloudflareUrl(url),
1326
1359
  headers,
1327
- body: { model: modelId, messages: [{ role: 'user', content: 'hi' }], max_tokens: 1 },
1360
+ body: { model: apiModelId, messages: [{ role: 'user', content: 'hi' }], max_tokens: 1 },
1328
1361
  }
1329
1362
  }
1330
1363
 
@@ -1339,7 +1372,7 @@ function buildPingRequest(apiKey, modelId, providerKey, url) {
1339
1372
  return {
1340
1373
  url,
1341
1374
  headers,
1342
- body: { model: modelId, messages: [{ role: 'user', content: 'hi' }], max_tokens: 1 },
1375
+ body: { model: apiModelId, messages: [{ role: 'user', content: 'hi' }], max_tokens: 1 },
1343
1376
  }
1344
1377
  }
1345
1378
 
@@ -1387,6 +1420,8 @@ const OPENCODE_MODEL_MAP = {
1387
1420
  }
1388
1421
 
1389
1422
  function getOpenCodeModelId(providerKey, modelId) {
1423
+ // 📖 ZAI models stored as "zai/glm-..." but OpenCode expects just "glm-..."
1424
+ if (providerKey === 'zai') return modelId.replace(/^zai\//, '')
1390
1425
  return OPENCODE_MODEL_MAP[providerKey]?.[modelId] || modelId
1391
1426
  }
1392
1427
 
@@ -1409,6 +1444,7 @@ const ENV_VAR_NAMES = {
1409
1444
  together: 'TOGETHER_API_KEY',
1410
1445
  cloudflare: 'CLOUDFLARE_API_TOKEN',
1411
1446
  perplexity: 'PERPLEXITY_API_KEY',
1447
+ zai: 'ZAI_API_KEY',
1412
1448
  }
1413
1449
 
1414
1450
  // 📖 Provider metadata used by the setup wizard and Settings details panel.
@@ -1533,17 +1569,18 @@ const PROVIDER_METADATA = {
1533
1569
  signupHint: 'Generate API key (billing may be required)',
1534
1570
  rateLimits: 'Tiered limits by spend (default ~50 RPM)',
1535
1571
  },
1572
+ zai: {
1573
+ label: 'ZAI (z.ai)',
1574
+ color: chalk.rgb(0, 150, 255),
1575
+ signupUrl: 'https://z.ai',
1576
+ signupHint: 'Sign up and generate an API key',
1577
+ rateLimits: 'Free tier (generous quota)',
1578
+ },
1536
1579
  }
1537
1580
 
1538
- // 📖 OpenCode config location varies by platform
1539
- // 📖 Windows: %APPDATA%\opencode\opencode.json (or sometimes ~/.config/opencode)
1540
- // 📖 macOS/Linux: ~/.config/opencode/opencode.json
1541
- const OPENCODE_CONFIG = isWindows
1542
- ? join(homedir(), 'AppData', 'Roaming', 'opencode', 'opencode.json')
1543
- : join(homedir(), '.config', 'opencode', 'opencode.json')
1544
-
1545
- // 📖 Fallback to .config on Windows if AppData doesn't exist
1546
- const OPENCODE_CONFIG_FALLBACK = join(homedir(), '.config', 'opencode', 'opencode.json')
1581
+ // 📖 OpenCode config location: ~/.config/opencode/opencode.json on ALL platforms.
1582
+ // 📖 OpenCode uses xdg-basedir which resolves to %USERPROFILE%\.config on Windows.
1583
+ const OPENCODE_CONFIG = join(homedir(), '.config', 'opencode', 'opencode.json')
1547
1584
  const OPENCODE_PORT_RANGE_START = 4096
1548
1585
  const OPENCODE_PORT_RANGE_END = 5096
1549
1586
 
@@ -1585,8 +1622,6 @@ async function resolveOpenCodeTmuxPort() {
1585
1622
  }
1586
1623
 
1587
1624
  function getOpenCodeConfigPath() {
1588
- if (existsSync(OPENCODE_CONFIG)) return OPENCODE_CONFIG
1589
- if (isWindows && existsSync(OPENCODE_CONFIG_FALLBACK)) return OPENCODE_CONFIG_FALLBACK
1590
1625
  return OPENCODE_CONFIG
1591
1626
  }
1592
1627
 
@@ -1629,14 +1664,68 @@ function checkNvidiaNimConfig() {
1629
1664
  // 📖 Resolves the actual API key from config/env and passes it as an env var
1630
1665
  // 📖 to the child process so OpenCode's {env:GROQ_API_KEY} references work
1631
1666
  // 📖 even when the key is only in ~/.free-coding-models.json (not in shell env).
1632
- async function spawnOpenCode(args, providerKey, fcmConfig) {
1667
+ // 📖 createZaiProxy: Localhost reverse proxy that bridges ZAI's non-standard API paths
1668
+ // 📖 to OpenCode's expected /v1/* OpenAI-compatible format.
1669
+ // 📖 OpenCode's local provider calls GET /v1/models for discovery and POST /v1/chat/completions
1670
+ // 📖 for inference. ZAI's API lives at /api/coding/paas/v4/* instead — this proxy rewrites.
1671
+ // 📖 Returns { server, port } — caller must server.close() when done.
1672
+ async function createZaiProxy(apiKey) {
1673
+ const server = createHttpServer((req, res) => {
1674
+ let targetPath = req.url
1675
+ // 📖 Rewrite /v1/* → /api/coding/paas/v4/*
1676
+ if (targetPath.startsWith('/v1/')) {
1677
+ targetPath = '/api/coding/paas/v4/' + targetPath.slice(4)
1678
+ } else if (targetPath.startsWith('/v1')) {
1679
+ targetPath = '/api/coding/paas/v4' + targetPath.slice(3)
1680
+ } else {
1681
+ // 📖 Non /v1 paths (e.g. /api/v0/ health checks) — reject
1682
+ res.writeHead(404)
1683
+ res.end()
1684
+ return
1685
+ }
1686
+ const headers = { ...req.headers, host: 'api.z.ai' }
1687
+ if (apiKey) headers.authorization = `Bearer ${apiKey}`
1688
+ // 📖 Remove transfer-encoding to avoid chunked encoding issues with https.request
1689
+ delete headers['transfer-encoding']
1690
+ const proxyReq = httpsRequest({
1691
+ hostname: 'api.z.ai',
1692
+ port: 443,
1693
+ path: targetPath,
1694
+ method: req.method,
1695
+ headers,
1696
+ }, (proxyRes) => {
1697
+ res.writeHead(proxyRes.statusCode, proxyRes.headers)
1698
+ proxyRes.pipe(res)
1699
+ })
1700
+ proxyReq.on('error', () => { res.writeHead(502); res.end() })
1701
+ req.pipe(proxyReq)
1702
+ })
1703
+ await new Promise(r => server.listen(0, '127.0.0.1', r))
1704
+ return { server, port: server.address().port }
1705
+ }
1706
+
1707
+ async function spawnOpenCode(args, providerKey, fcmConfig, existingZaiProxy = null) {
1633
1708
  const envVarName = ENV_VAR_NAMES[providerKey]
1634
1709
  const resolvedKey = getApiKey(fcmConfig, providerKey)
1635
1710
  const childEnv = { ...process.env }
1711
+ // 📖 Suppress MaxListenersExceededWarning from @modelcontextprotocol/sdk
1712
+ // 📖 when 7+ MCP servers cause drain listener count to exceed default 10
1713
+ childEnv.NODE_NO_WARNINGS = '1'
1636
1714
  const finalArgs = [...args]
1637
1715
  const hasExplicitPortArg = finalArgs.includes('--port')
1638
1716
  if (envVarName && resolvedKey) childEnv[envVarName] = resolvedKey
1639
1717
 
1718
+ // 📖 ZAI proxy: OpenCode's Go binary doesn't know about ZAI as a provider.
1719
+ // 📖 We spin up a localhost proxy that rewrites /v1/* → /api/coding/paas/v4/*
1720
+ // 📖 and register ZAI as a custom openai-compatible provider in opencode.json.
1721
+ // 📖 If startOpenCode already started the proxy, reuse it (existingZaiProxy).
1722
+ let zaiProxy = existingZaiProxy
1723
+ if (providerKey === 'zai' && resolvedKey && !zaiProxy) {
1724
+ const { server, port } = await createZaiProxy(resolvedKey)
1725
+ zaiProxy = server
1726
+ console.log(chalk.dim(` 🔀 ZAI proxy listening on port ${port} (rewrites /v1/* → ZAI API)`))
1727
+ }
1728
+
1640
1729
  // 📖 In tmux, OpenCode sub-agents need a listening port to open extra panes.
1641
1730
  // 📖 We auto-pick one if the user did not provide --port explicitly.
1642
1731
  if (process.env.TMUX && !hasExplicitPortArg) {
@@ -1664,8 +1753,22 @@ async function spawnOpenCode(args, providerKey, fcmConfig) {
1664
1753
  })
1665
1754
 
1666
1755
  return new Promise((resolve, reject) => {
1667
- child.on('exit', resolve)
1756
+ child.on('exit', (code) => {
1757
+ if (zaiProxy) zaiProxy.close()
1758
+ // 📖 ZAI cleanup: remove the ephemeral proxy provider from opencode.json
1759
+ // 📖 so a stale baseURL doesn't cause "Model zai/… is not valid" on next launch
1760
+ if (providerKey === 'zai') {
1761
+ try {
1762
+ const cfg = loadOpenCodeConfig()
1763
+ if (cfg.provider?.zai) delete cfg.provider.zai
1764
+ if (typeof cfg.model === 'string' && cfg.model.startsWith('zai/')) delete cfg.model
1765
+ saveOpenCodeConfig(cfg)
1766
+ } catch { /* best-effort cleanup */ }
1767
+ }
1768
+ resolve(code)
1769
+ })
1668
1770
  child.on('error', (err) => {
1771
+ if (zaiProxy) zaiProxy.close()
1669
1772
  if (err.code === 'ENOENT') {
1670
1773
  console.error(chalk.red('\n X Could not find "opencode" -- is it installed and in your PATH?'))
1671
1774
  console.error(chalk.dim(' Install: npm i -g opencode or see https://opencode.ai'))
@@ -1775,8 +1878,72 @@ After installation, you can use: opencode --model ${modelRef}`
1775
1878
  return
1776
1879
  }
1777
1880
 
1778
- // 📖 Groq: built-in OpenCode provider -- needs provider block with apiKey in opencode.json.
1779
- // 📖 Cerebras: NOT built-in -- needs @ai-sdk/openai-compatible + baseURL, like NVIDIA.
1881
+ // 📖 ZAI: OpenCode's Go binary has no built-in ZAI provider.
1882
+ // 📖 We start a localhost proxy that rewrites /v1/* → /api/coding/paas/v4/*
1883
+ // 📖 and register ZAI as a custom openai-compatible provider pointing to the proxy.
1884
+ // 📖 This gives OpenCode a standard provider/model format (zai/glm-5) it understands.
1885
+ if (providerKey === 'zai') {
1886
+ const resolvedKey = getApiKey(fcmConfig, providerKey)
1887
+ if (!resolvedKey) {
1888
+ console.log(chalk.yellow(' ⚠ ZAI API key not found. Set ZAI_API_KEY environment variable.'))
1889
+ console.log()
1890
+ return
1891
+ }
1892
+
1893
+ // 📖 Start proxy FIRST to get the port for config
1894
+ const { server: zaiProxyServer, port: zaiProxyPort } = await createZaiProxy(resolvedKey)
1895
+ console.log(chalk.dim(` 🔀 ZAI proxy listening on port ${zaiProxyPort} (rewrites /v1/* → ZAI API)`))
1896
+
1897
+ console.log(chalk.green(` 🚀 Setting ${chalk.bold(model.label)} as default…`))
1898
+ console.log(chalk.dim(` Model: ${modelRef}`))
1899
+ console.log()
1900
+
1901
+ const config = loadOpenCodeConfig()
1902
+ const backupPath = `${getOpenCodeConfigPath()}.backup-${Date.now()}`
1903
+
1904
+ if (existsSync(getOpenCodeConfigPath())) {
1905
+ copyFileSync(getOpenCodeConfigPath(), backupPath)
1906
+ console.log(chalk.dim(` 💾 Backup: ${backupPath}`))
1907
+ }
1908
+
1909
+ // 📖 Register ZAI as an openai-compatible provider pointing to our localhost proxy
1910
+ // 📖 apiKey is required by @ai-sdk/openai-compatible SDK — the proxy handles real auth internally
1911
+ if (!config.provider) config.provider = {}
1912
+ config.provider.zai = {
1913
+ npm: '@ai-sdk/openai-compatible',
1914
+ name: 'ZAI',
1915
+ options: {
1916
+ baseURL: `http://127.0.0.1:${zaiProxyPort}/v1`,
1917
+ apiKey: 'zai-proxy',
1918
+ },
1919
+ models: {}
1920
+ }
1921
+ config.provider.zai.models[ocModelId] = { name: model.label }
1922
+ config.model = modelRef
1923
+
1924
+ saveOpenCodeConfig(config)
1925
+
1926
+ const savedConfig = loadOpenCodeConfig()
1927
+ console.log(chalk.dim(` 📝 Config saved to: ${getOpenCodeConfigPath()}`))
1928
+ console.log(chalk.dim(` 📝 Default model in config: ${savedConfig.model || 'NOT SET'}`))
1929
+ console.log()
1930
+
1931
+ if (savedConfig.model === config.model) {
1932
+ console.log(chalk.green(` ✓ Default model set to: ${modelRef}`))
1933
+ } else {
1934
+ console.log(chalk.yellow(` ⚠ Config might not have been saved correctly`))
1935
+ }
1936
+ console.log()
1937
+ console.log(chalk.dim(' Starting OpenCode…'))
1938
+ console.log()
1939
+
1940
+ // 📖 Pass existing proxy to spawnOpenCode so it doesn't start a second one
1941
+ await spawnOpenCode(['--model', modelRef], providerKey, fcmConfig, zaiProxyServer)
1942
+ return
1943
+ }
1944
+
1945
+ // 📖 Groq: built-in OpenCode provider — needs provider block with apiKey in opencode.json.
1946
+ // 📖 Cerebras: NOT built-in — needs @ai-sdk/openai-compatible + baseURL, like NVIDIA.
1780
1947
  // 📖 Both need the model registered in provider.<key>.models so OpenCode can find it.
1781
1948
  console.log(chalk.green(` 🚀 Setting ${chalk.bold(model.label)} as default…`))
1782
1949
  console.log(chalk.dim(` Model: ${modelRef}`))
@@ -2092,6 +2259,16 @@ ${isWindows ? 'set NVIDIA_API_KEY=your_key_here' : 'export NVIDIA_API_KEY=your_k
2092
2259
  return
2093
2260
  }
2094
2261
 
2262
+ // 📖 ZAI: Desktop mode can't use the localhost proxy (Desktop is a standalone app).
2263
+ // 📖 Direct the user to use OpenCode CLI mode instead, which supports ZAI via proxy.
2264
+ if (providerKey === 'zai') {
2265
+ console.log(chalk.yellow(' ⚠ ZAI models are supported in OpenCode CLI mode only (not Desktop).'))
2266
+ console.log(chalk.dim(' Reason: ZAI requires a localhost proxy that only works with the CLI spawn.'))
2267
+ console.log(chalk.dim(' Use OpenCode CLI mode (default) to launch ZAI models.'))
2268
+ console.log()
2269
+ return
2270
+ }
2271
+
2095
2272
  // 📖 Groq: built-in OpenCode provider — needs provider block with apiKey in opencode.json.
2096
2273
  // 📖 Cerebras: NOT built-in — needs @ai-sdk/openai-compatible + baseURL, like NVIDIA.
2097
2274
  // 📖 Both need the model registered in provider.<key>.models so OpenCode can find it.
@@ -2454,6 +2631,16 @@ async function main() {
2454
2631
  ensureTelemetryConfig(config)
2455
2632
  ensureFavoritesConfig(config)
2456
2633
 
2634
+ // 📖 If --profile <name> was passed, load that profile into the live config
2635
+ if (cliArgs.profileName) {
2636
+ const profileSettings = loadProfile(config, cliArgs.profileName)
2637
+ if (!profileSettings) {
2638
+ console.error(chalk.red(` Unknown profile "${cliArgs.profileName}". Available: ${listProfiles(config).join(', ') || '(none)'}`))
2639
+ process.exit(1)
2640
+ }
2641
+ saveConfig(config)
2642
+ }
2643
+
2457
2644
  // 📖 Check if any provider has a key — if not, run the first-time setup wizard
2458
2645
  const hasAnyKey = Object.keys(sources).some(pk => !!getApiKey(config, pk))
2459
2646
 
@@ -2597,6 +2784,22 @@ async function main() {
2597
2784
  helpVisible: false, // 📖 Whether the help overlay (K key) is active
2598
2785
  settingsScrollOffset: 0, // 📖 Vertical scroll offset for Settings overlay viewport
2599
2786
  helpScrollOffset: 0, // 📖 Vertical scroll offset for Help overlay viewport
2787
+ // 📖 Smart Recommend overlay state (Q key opens it)
2788
+ recommendOpen: false, // 📖 Whether the recommend overlay is active
2789
+ recommendPhase: 'questionnaire', // 📖 'questionnaire'|'analyzing'|'results' — current phase
2790
+ recommendCursor: 0, // 📖 Selected question option (0-based index within current question)
2791
+ recommendQuestion: 0, // 📖 Which question we're on (0=task, 1=priority, 2=context)
2792
+ recommendAnswers: { taskType: null, priority: null, contextBudget: null }, // 📖 User's answers
2793
+ recommendProgress: 0, // 📖 Analysis progress percentage (0–100)
2794
+ recommendResults: [], // 📖 Top N recommendations from getTopRecommendations()
2795
+ recommendScrollOffset: 0, // 📖 Vertical scroll offset for Recommend overlay viewport
2796
+ recommendAnalysisTimer: null, // 📖 setInterval handle for the 10s analysis phase
2797
+ recommendPingTimer: null, // 📖 setInterval handle for 2 pings/sec during analysis
2798
+ recommendedKeys: new Set(), // 📖 Set of "providerKey/modelId" for recommended models (shown in main table)
2799
+ // 📖 Config Profiles state
2800
+ activeProfile: getActiveProfileName(config), // 📖 Currently loaded profile name (or null)
2801
+ profileSaveMode: false, // 📖 Whether the inline "Save profile" name input is active
2802
+ profileSaveBuffer: '', // 📖 Typed characters for the profile name being saved
2600
2803
  }
2601
2804
 
2602
2805
  // 📖 Re-clamp viewport on terminal resize
@@ -2762,11 +2965,39 @@ async function main() {
2762
2965
  lines.push(chalk.red(` ${state.settingsUpdateError}`))
2763
2966
  }
2764
2967
 
2968
+ // 📖 Profiles section — list saved profiles with active indicator + delete support
2969
+ const savedProfiles = listProfiles(state.config)
2970
+ const profileStartIdx = updateRowIdx + 1
2971
+ const maxRowIdx = savedProfiles.length > 0 ? profileStartIdx + savedProfiles.length - 1 : updateRowIdx
2972
+
2973
+ lines.push('')
2974
+ lines.push(` ${chalk.bold('📋 Profiles')} ${chalk.dim(savedProfiles.length > 0 ? `(${savedProfiles.length} saved)` : '(none — press Shift+S in main view to save)')}`)
2975
+ lines.push(` ${chalk.dim(' ' + '─'.repeat(112))}`)
2976
+ lines.push('')
2977
+
2978
+ if (savedProfiles.length === 0) {
2979
+ lines.push(chalk.dim(' No saved profiles. Press Shift+S in the main table to save your current settings as a profile.'))
2980
+ } else {
2981
+ for (let i = 0; i < savedProfiles.length; i++) {
2982
+ const pName = savedProfiles[i]
2983
+ const rowIdx = profileStartIdx + i
2984
+ const isCursor = state.settingsCursor === rowIdx
2985
+ const isActive = state.activeProfile === pName
2986
+ const activeBadge = isActive ? chalk.greenBright(' ✅ active') : ''
2987
+ const bullet = isCursor ? chalk.bold.cyan(' ❯ ') : chalk.dim(' ')
2988
+ const profileLabel = chalk.rgb(200, 150, 255).bold(pName.padEnd(30))
2989
+ const deleteHint = isCursor ? chalk.dim(' Enter→Load • Backspace→Delete') : ''
2990
+ const row = `${bullet}${profileLabel}${activeBadge}${deleteHint}`
2991
+ cursorLineByRow[rowIdx] = lines.length
2992
+ lines.push(isCursor ? chalk.bgRgb(40, 20, 60)(row) : row)
2993
+ }
2994
+ }
2995
+
2765
2996
  lines.push('')
2766
2997
  if (state.settingsEditMode) {
2767
2998
  lines.push(chalk.dim(' Type API key • Enter Save • Esc Cancel'))
2768
2999
  } else {
2769
- lines.push(chalk.dim(' ↑↓ Navigate • Enter Edit key / Toggle analytics / Check-or-Install update • Space Toggle enabled • T Test key • U Check updates • Esc Close'))
3000
+ lines.push(chalk.dim(' ↑↓ Navigate • Enter Edit key / Toggle / Load profile • Space Toggle • T Test key • U Updates • ⌫ Delete profile • Esc Close'))
2770
3001
  }
2771
3002
  lines.push('')
2772
3003
 
@@ -2800,7 +3031,7 @@ async function main() {
2800
3031
  lines.push(` ${chalk.cyan('Rank')} SWE-bench rank (1 = best coding score) ${chalk.dim('Sort:')} ${chalk.yellow('R')}`)
2801
3032
  lines.push(` ${chalk.dim('Quick glance at which model is objectively the best coder right now.')}`)
2802
3033
  lines.push('')
2803
- lines.push(` ${chalk.cyan('Tier')} S+ / S / A+ / A / A- / B+ / B / C based on SWE-bench score ${chalk.dim('Sort:')} ${chalk.yellow('Y')}`)
3034
+ lines.push(` ${chalk.cyan('Tier')} S+ / S / A+ / A / A- / B+ / B / C based on SWE-bench score ${chalk.dim('Sort:')} ${chalk.yellow('Y')} ${chalk.dim('Cycle:')} ${chalk.yellow('T')}`)
2804
3035
  lines.push(` ${chalk.dim('Skip the noise — S/S+ models solve real GitHub issues, C models are for light tasks.')}`)
2805
3036
  lines.push('')
2806
3037
  lines.push(` ${chalk.cyan('SWE%')} SWE-bench score — coding ability benchmark (color-coded) ${chalk.dim('Sort:')} ${chalk.yellow('S')}`)
@@ -2812,7 +3043,7 @@ async function main() {
2812
3043
  lines.push(` ${chalk.cyan('Model')} Model name (⭐ = favorited, pinned at top) ${chalk.dim('Sort:')} ${chalk.yellow('M')} ${chalk.dim('Favorite:')} ${chalk.yellow('F')}`)
2813
3044
  lines.push(` ${chalk.dim('Star the ones you like — they stay pinned at the top across restarts.')}`)
2814
3045
  lines.push('')
2815
- lines.push(` ${chalk.cyan('Origin')} Provider source (NIM, Groq, Cerebras, etc.) ${chalk.dim('Sort:')} ${chalk.yellow('O')} ${chalk.dim('Filter:')} ${chalk.yellow('N')}`)
3046
+ lines.push(` ${chalk.cyan('Origin')} Provider source (NIM, Groq, Cerebras, etc.) ${chalk.dim('Sort:')} ${chalk.yellow('O')} ${chalk.dim('Cycle:')} ${chalk.yellow('N')}`)
2816
3047
  lines.push(` ${chalk.dim('Same model on different providers can have very different speed and uptime.')}`)
2817
3048
  lines.push('')
2818
3049
  lines.push(` ${chalk.cyan('Latest')} Most recent ping response time (ms) ${chalk.dim('Sort:')} ${chalk.yellow('L')}`)
@@ -2839,16 +3070,17 @@ async function main() {
2839
3070
  lines.push(` ${chalk.yellow('↑↓')} Navigate rows`)
2840
3071
  lines.push(` ${chalk.yellow('Enter')} Select model and launch`)
2841
3072
  lines.push('')
2842
- lines.push(` ${chalk.bold('Filters')}`)
2843
- lines.push(` ${chalk.yellow('T')} Cycle tier filter ${chalk.dim('(All → S+ → S → A+ → A → A- → B+ → B → C → All)')}`)
2844
- lines.push(` ${chalk.yellow('N')} Cycle origin filter ${chalk.dim('(All → NIM → Groq → Cerebras → ... each provider → All)')}`)
2845
- lines.push('')
2846
3073
  lines.push(` ${chalk.bold('Controls')}`)
2847
3074
  lines.push(` ${chalk.yellow('W')} Decrease ping interval (faster)`)
2848
3075
  lines.push(` ${chalk.yellow('X')} Increase ping interval (slower)`)
2849
3076
  lines.push(` ${chalk.yellow('Z')} Cycle launch mode ${chalk.dim('(OpenCode CLI → OpenCode Desktop → OpenClaw)')}`)
2850
3077
  lines.push(` ${chalk.yellow('F')} Toggle favorite on selected row ${chalk.dim('(⭐ pinned at top, persisted)')}`)
3078
+ lines.push(` ${chalk.yellow('Q')} Smart Recommend ${chalk.dim('(🎯 find the best model for your task — questionnaire + live analysis)')}`)
2851
3079
  lines.push(` ${chalk.yellow('P')} Open settings ${chalk.dim('(manage API keys, provider toggles, analytics, manual update)')}`)
3080
+ lines.push(` ${chalk.yellow('Shift+P')} Cycle config profile ${chalk.dim('(switch between saved profiles live)')}`)
3081
+ lines.push(` ${chalk.yellow('Shift+S')} Save current config as a named profile ${chalk.dim('(inline prompt — type name + Enter)')}`)
3082
+ lines.push(` ${chalk.dim('Profiles store: favorites, sort, tier filter, ping interval, API keys.')}`)
3083
+ lines.push(` ${chalk.dim('Use --profile <name> to load a profile on startup.')}`)
2852
3084
  lines.push(` ${chalk.yellow('K')} / ${chalk.yellow('Esc')} Show/hide this help`)
2853
3085
  lines.push(` ${chalk.yellow('Ctrl+C')} Exit`)
2854
3086
  lines.push('')
@@ -2871,6 +3103,8 @@ async function main() {
2871
3103
  lines.push(` ${chalk.cyan('free-coding-models --fiable')} ${chalk.dim('10s reliability analysis')}`)
2872
3104
  lines.push(` ${chalk.cyan('free-coding-models --tier S|A|B|C')} ${chalk.dim('Filter by tier letter')}`)
2873
3105
  lines.push(` ${chalk.cyan('free-coding-models --no-telemetry')} ${chalk.dim('Disable telemetry for this run')}`)
3106
+ lines.push(` ${chalk.cyan('free-coding-models --recommend')} ${chalk.dim('Auto-open Smart Recommend on start')}`)
3107
+ lines.push(` ${chalk.cyan('free-coding-models --profile <name>')} ${chalk.dim('Load a saved config profile')}`)
2874
3108
  lines.push(` ${chalk.dim('Flags can be combined: --openclaw --tier S')}`)
2875
3109
  lines.push('')
2876
3110
  // 📖 Help overlay can be longer than viewport, so keep a dedicated scroll offset.
@@ -2881,6 +3115,211 @@ async function main() {
2881
3115
  return cleared.join('\n')
2882
3116
  }
2883
3117
 
3118
+ // ─── Smart Recommend overlay renderer ─────────────────────────────────────
3119
+ // 📖 renderRecommend: Draw the Smart Recommend overlay with 3 phases:
3120
+ // 1. 'questionnaire' — ask 3 questions (task type, priority, context budget)
3121
+ // 2. 'analyzing' — loading screen with progress bar (10s, 2 pings/sec)
3122
+ // 3. 'results' — show Top 3 recommendations with scores
3123
+ function renderRecommend() {
3124
+ const EL = '\x1b[K'
3125
+ const lines = []
3126
+
3127
+ lines.push('')
3128
+ lines.push(` ${chalk.bold('🎯 Smart Recommend')} ${chalk.dim('— find the best model for your task')}`)
3129
+ lines.push('')
3130
+
3131
+ if (state.recommendPhase === 'questionnaire') {
3132
+ // 📖 Question definitions — each has a title, options array, and answer key
3133
+ const questions = [
3134
+ {
3135
+ title: 'What are you working on?',
3136
+ options: Object.entries(TASK_TYPES).map(([key, val]) => ({ key, label: val.label })),
3137
+ answerKey: 'taskType',
3138
+ },
3139
+ {
3140
+ title: 'What matters most?',
3141
+ options: Object.entries(PRIORITY_TYPES).map(([key, val]) => ({ key, label: val.label })),
3142
+ answerKey: 'priority',
3143
+ },
3144
+ {
3145
+ title: 'How big is your context?',
3146
+ options: Object.entries(CONTEXT_BUDGETS).map(([key, val]) => ({ key, label: val.label })),
3147
+ answerKey: 'contextBudget',
3148
+ },
3149
+ ]
3150
+
3151
+ const q = questions[state.recommendQuestion]
3152
+ const qNum = state.recommendQuestion + 1
3153
+ const qTotal = questions.length
3154
+
3155
+ // 📖 Progress breadcrumbs showing answered questions
3156
+ let breadcrumbs = ''
3157
+ for (let i = 0; i < questions.length; i++) {
3158
+ const answered = state.recommendAnswers[questions[i].answerKey]
3159
+ if (i < state.recommendQuestion && answered) {
3160
+ const answeredLabel = questions[i].options.find(o => o.key === answered)?.label || answered
3161
+ breadcrumbs += chalk.greenBright(` ✓ ${questions[i].title} ${chalk.bold(answeredLabel)}`) + '\n'
3162
+ }
3163
+ }
3164
+ if (breadcrumbs) {
3165
+ lines.push(breadcrumbs.trimEnd())
3166
+ lines.push('')
3167
+ }
3168
+
3169
+ lines.push(` ${chalk.bold(`Question ${qNum}/${qTotal}:`)} ${chalk.cyan(q.title)}`)
3170
+ lines.push('')
3171
+
3172
+ for (let i = 0; i < q.options.length; i++) {
3173
+ const opt = q.options[i]
3174
+ const isCursor = i === state.recommendCursor
3175
+ const bullet = isCursor ? chalk.bold.cyan(' ❯ ') : chalk.dim(' ')
3176
+ const label = isCursor ? chalk.bold.white(opt.label) : chalk.white(opt.label)
3177
+ lines.push(`${bullet}${label}`)
3178
+ }
3179
+
3180
+ lines.push('')
3181
+ lines.push(chalk.dim(' ↑↓ navigate • Enter select • Esc cancel'))
3182
+
3183
+ } else if (state.recommendPhase === 'analyzing') {
3184
+ // 📖 Loading screen with progress bar
3185
+ const pct = Math.min(100, Math.round(state.recommendProgress))
3186
+ const barWidth = 40
3187
+ const filled = Math.round(barWidth * pct / 100)
3188
+ const empty = barWidth - filled
3189
+ const bar = chalk.greenBright('█'.repeat(filled)) + chalk.dim('░'.repeat(empty))
3190
+
3191
+ lines.push(` ${chalk.bold('Analyzing models...')}`)
3192
+ lines.push('')
3193
+ lines.push(` ${bar} ${chalk.bold(String(pct) + '%')}`)
3194
+ lines.push('')
3195
+
3196
+ // 📖 Show what we're doing
3197
+ const taskLabel = TASK_TYPES[state.recommendAnswers.taskType]?.label || '—'
3198
+ const prioLabel = PRIORITY_TYPES[state.recommendAnswers.priority]?.label || '—'
3199
+ const ctxLabel = CONTEXT_BUDGETS[state.recommendAnswers.contextBudget]?.label || '—'
3200
+ lines.push(chalk.dim(` Task: ${taskLabel} • Priority: ${prioLabel} • Context: ${ctxLabel}`))
3201
+ lines.push('')
3202
+
3203
+ // 📖 Spinning indicator
3204
+ const spinIdx = state.frame % FRAMES.length
3205
+ lines.push(` ${chalk.yellow(FRAMES[spinIdx])} Pinging models at 2 pings/sec to gather fresh latency data...`)
3206
+ lines.push('')
3207
+ lines.push(chalk.dim(' Esc to cancel'))
3208
+
3209
+ } else if (state.recommendPhase === 'results') {
3210
+ // 📖 Show Top 3 results with detailed info
3211
+ const taskLabel = TASK_TYPES[state.recommendAnswers.taskType]?.label || '—'
3212
+ const prioLabel = PRIORITY_TYPES[state.recommendAnswers.priority]?.label || '—'
3213
+ const ctxLabel = CONTEXT_BUDGETS[state.recommendAnswers.contextBudget]?.label || '—'
3214
+ lines.push(chalk.dim(` Task: ${taskLabel} • Priority: ${prioLabel} • Context: ${ctxLabel}`))
3215
+ lines.push('')
3216
+
3217
+ if (state.recommendResults.length === 0) {
3218
+ lines.push(` ${chalk.yellow('No models could be scored. Try different criteria or wait for more pings.')}`)
3219
+ } else {
3220
+ lines.push(` ${chalk.bold('Top Recommendations:')}`)
3221
+ lines.push('')
3222
+
3223
+ for (let i = 0; i < state.recommendResults.length; i++) {
3224
+ const rec = state.recommendResults[i]
3225
+ const r = rec.result
3226
+ const medal = i === 0 ? '🥇' : i === 1 ? '🥈' : '🥉'
3227
+ const providerName = sources[r.providerKey]?.name ?? r.providerKey
3228
+ const tierFn = TIER_COLOR[r.tier] ?? (t => chalk.white(t))
3229
+ const avg = getAvg(r)
3230
+ const avgStr = avg === Infinity ? '—' : Math.round(avg) + 'ms'
3231
+ const sweStr = r.sweScore ?? '—'
3232
+ const ctxStr = r.ctx ?? '—'
3233
+ const stability = getStabilityScore(r)
3234
+ const stabStr = stability === -1 ? '—' : String(stability)
3235
+
3236
+ const isCursor = i === state.recommendCursor
3237
+ const highlight = isCursor ? chalk.bgRgb(20, 50, 25) : (s => s)
3238
+
3239
+ lines.push(highlight(` ${medal} ${chalk.bold('#' + (i + 1))} ${chalk.bold.white(r.label)} ${chalk.dim('(' + providerName + ')')}`))
3240
+ lines.push(highlight(` Score: ${chalk.bold.greenBright(String(rec.score) + '/100')} │ Tier: ${tierFn(r.tier)} │ SWE: ${chalk.cyan(sweStr)} │ Avg: ${chalk.yellow(avgStr)} │ CTX: ${chalk.cyan(ctxStr)} │ Stability: ${chalk.cyan(stabStr)}`))
3241
+ lines.push('')
3242
+ }
3243
+ }
3244
+
3245
+ lines.push('')
3246
+ lines.push(` ${chalk.dim('These models are now')} ${chalk.greenBright('highlighted')} ${chalk.dim('and')} 🎯 ${chalk.dim('pinned in the main table.')}`)
3247
+ lines.push('')
3248
+ lines.push(chalk.dim(' ↑↓ navigate • Enter select & close • Esc close • Q new search'))
3249
+ }
3250
+
3251
+ lines.push('')
3252
+ const { visible, offset } = sliceOverlayLines(lines, state.recommendScrollOffset, state.terminalRows)
3253
+ state.recommendScrollOffset = offset
3254
+ const tintedLines = tintOverlayLines(visible, RECOMMEND_OVERLAY_BG)
3255
+ const cleared2 = tintedLines.map(l => l + EL)
3256
+ return cleared2.join('\n')
3257
+ }
3258
+
3259
+ // ─── Smart Recommend: analysis phase controller ────────────────────────────
3260
+ // 📖 startRecommendAnalysis: begins the 10-second analysis phase.
3261
+ // 📖 Pings a random subset of visible models at 2 pings/sec while advancing progress.
3262
+ // 📖 After 10 seconds, computes recommendations and transitions to results phase.
3263
+ function startRecommendAnalysis() {
3264
+ state.recommendPhase = 'analyzing'
3265
+ state.recommendProgress = 0
3266
+ state.recommendResults = []
3267
+
3268
+ const startTime = Date.now()
3269
+ const ANALYSIS_DURATION = 10_000 // 📖 10 seconds
3270
+ const PING_RATE = 500 // 📖 2 pings per second (every 500ms)
3271
+
3272
+ // 📖 Progress updater — runs every 200ms to update the progress bar
3273
+ state.recommendAnalysisTimer = setInterval(() => {
3274
+ const elapsed = Date.now() - startTime
3275
+ state.recommendProgress = Math.min(100, (elapsed / ANALYSIS_DURATION) * 100)
3276
+
3277
+ if (elapsed >= ANALYSIS_DURATION) {
3278
+ // 📖 Analysis complete — compute recommendations
3279
+ clearInterval(state.recommendAnalysisTimer)
3280
+ clearInterval(state.recommendPingTimer)
3281
+ state.recommendAnalysisTimer = null
3282
+ state.recommendPingTimer = null
3283
+
3284
+ const recs = getTopRecommendations(
3285
+ state.results,
3286
+ state.recommendAnswers.taskType,
3287
+ state.recommendAnswers.priority,
3288
+ state.recommendAnswers.contextBudget,
3289
+ 3
3290
+ )
3291
+ state.recommendResults = recs
3292
+ state.recommendPhase = 'results'
3293
+ state.recommendCursor = 0
3294
+
3295
+ // 📖 Mark recommended models so the main table can highlight them
3296
+ state.recommendedKeys = new Set(recs.map(rec => toFavoriteKey(rec.result.providerKey, rec.result.modelId)))
3297
+ // 📖 Tag each result object so sortResultsWithPinnedFavorites can pin them
3298
+ state.results.forEach(r => {
3299
+ const key = toFavoriteKey(r.providerKey, r.modelId)
3300
+ const rec = recs.find(rec => toFavoriteKey(rec.result.providerKey, rec.result.modelId) === key)
3301
+ r.isRecommended = !!rec
3302
+ r.recommendScore = rec ? rec.score : 0
3303
+ })
3304
+ }
3305
+ }, 200)
3306
+
3307
+ // 📖 Targeted pinging — ping random visible models at 2/sec for fresh data
3308
+ state.recommendPingTimer = setInterval(() => {
3309
+ const visible = state.results.filter(r => !r.hidden && r.status !== 'noauth')
3310
+ if (visible.length === 0) return
3311
+ // 📖 Pick a random model to ping — spreads load across all models over 10s
3312
+ const target = visible[Math.floor(Math.random() * visible.length)]
3313
+ pingModel(target).catch(() => {})
3314
+ }, PING_RATE)
3315
+ }
3316
+
3317
+ // 📖 stopRecommendAnalysis: cleanup timers if user cancels during analysis
3318
+ function stopRecommendAnalysis() {
3319
+ if (state.recommendAnalysisTimer) { clearInterval(state.recommendAnalysisTimer); state.recommendAnalysisTimer = null }
3320
+ if (state.recommendPingTimer) { clearInterval(state.recommendPingTimer); state.recommendPingTimer = null }
3321
+ }
3322
+
2884
3323
  // ─── Settings key test helper ───────────────────────────────────────────────
2885
3324
  // 📖 Fires a single ping to the selected provider to verify the API key works.
2886
3325
  async function testProviderKey(providerKey) {
@@ -2952,6 +3391,45 @@ async function main() {
2952
3391
  const onKeyPress = async (str, key) => {
2953
3392
  if (!key) return
2954
3393
 
3394
+ // 📖 Profile save mode: intercept ALL keys while inline name input is active.
3395
+ // 📖 Enter → save, Esc → cancel, Backspace → delete char, printable → append to buffer.
3396
+ if (state.profileSaveMode) {
3397
+ if (key.ctrl && key.name === 'c') { exit(0); return }
3398
+ if (key.name === 'escape') {
3399
+ // 📖 Cancel profile save — discard typed name
3400
+ state.profileSaveMode = false
3401
+ state.profileSaveBuffer = ''
3402
+ return
3403
+ }
3404
+ if (key.name === 'return') {
3405
+ // 📖 Confirm profile save — persist current TUI settings under typed name
3406
+ const name = state.profileSaveBuffer.trim()
3407
+ if (name.length > 0) {
3408
+ saveAsProfile(state.config, name, {
3409
+ tierFilter: TIER_CYCLE[tierFilterMode],
3410
+ sortColumn: state.sortColumn,
3411
+ sortAsc: state.sortDirection === 'asc',
3412
+ pingInterval: state.pingInterval,
3413
+ })
3414
+ setActiveProfile(state.config, name)
3415
+ state.activeProfile = name
3416
+ saveConfig(state.config)
3417
+ }
3418
+ state.profileSaveMode = false
3419
+ state.profileSaveBuffer = ''
3420
+ return
3421
+ }
3422
+ if (key.name === 'backspace') {
3423
+ state.profileSaveBuffer = state.profileSaveBuffer.slice(0, -1)
3424
+ return
3425
+ }
3426
+ // 📖 Append printable characters (str is the raw character typed)
3427
+ if (str && str.length === 1 && !key.ctrl && !key.meta) {
3428
+ state.profileSaveBuffer += str
3429
+ }
3430
+ return
3431
+ }
3432
+
2955
3433
  // 📖 Help overlay: full keyboard navigation + key swallowing while overlay is open.
2956
3434
  if (state.helpVisible) {
2957
3435
  const pageStep = Math.max(1, (state.terminalRows || 1) - 2)
@@ -2969,11 +3447,122 @@ async function main() {
2969
3447
  return
2970
3448
  }
2971
3449
 
3450
+ // 📖 Smart Recommend overlay: full keyboard handling while overlay is open.
3451
+ if (state.recommendOpen) {
3452
+ if (key.ctrl && key.name === 'c') { exit(0); return }
3453
+
3454
+ if (state.recommendPhase === 'questionnaire') {
3455
+ const questions = [
3456
+ { options: Object.keys(TASK_TYPES), answerKey: 'taskType' },
3457
+ { options: Object.keys(PRIORITY_TYPES), answerKey: 'priority' },
3458
+ { options: Object.keys(CONTEXT_BUDGETS), answerKey: 'contextBudget' },
3459
+ ]
3460
+ const q = questions[state.recommendQuestion]
3461
+
3462
+ if (key.name === 'escape') {
3463
+ // 📖 Cancel recommend — close overlay
3464
+ state.recommendOpen = false
3465
+ state.recommendPhase = 'questionnaire'
3466
+ state.recommendQuestion = 0
3467
+ state.recommendCursor = 0
3468
+ state.recommendAnswers = { taskType: null, priority: null, contextBudget: null }
3469
+ return
3470
+ }
3471
+ if (key.name === 'up') {
3472
+ state.recommendCursor = state.recommendCursor > 0 ? state.recommendCursor - 1 : q.options.length - 1
3473
+ return
3474
+ }
3475
+ if (key.name === 'down') {
3476
+ state.recommendCursor = state.recommendCursor < q.options.length - 1 ? state.recommendCursor + 1 : 0
3477
+ return
3478
+ }
3479
+ if (key.name === 'return') {
3480
+ // 📖 Record answer and advance to next question or start analysis
3481
+ state.recommendAnswers[q.answerKey] = q.options[state.recommendCursor]
3482
+ if (state.recommendQuestion < questions.length - 1) {
3483
+ state.recommendQuestion++
3484
+ state.recommendCursor = 0
3485
+ } else {
3486
+ // 📖 All questions answered — start analysis phase
3487
+ startRecommendAnalysis()
3488
+ }
3489
+ return
3490
+ }
3491
+ return // 📖 Swallow all other keys
3492
+ }
3493
+
3494
+ if (state.recommendPhase === 'analyzing') {
3495
+ if (key.name === 'escape') {
3496
+ // 📖 Cancel analysis — stop timers, return to questionnaire
3497
+ stopRecommendAnalysis()
3498
+ state.recommendOpen = false
3499
+ state.recommendPhase = 'questionnaire'
3500
+ state.recommendQuestion = 0
3501
+ state.recommendCursor = 0
3502
+ state.recommendAnswers = { taskType: null, priority: null, contextBudget: null }
3503
+ return
3504
+ }
3505
+ return // 📖 Swallow all keys during analysis (except Esc and Ctrl+C)
3506
+ }
3507
+
3508
+ if (state.recommendPhase === 'results') {
3509
+ if (key.name === 'escape') {
3510
+ // 📖 Close results — recommendations stay highlighted in main table
3511
+ state.recommendOpen = false
3512
+ return
3513
+ }
3514
+ if (key.name === 'q') {
3515
+ // 📖 Start a new search
3516
+ state.recommendPhase = 'questionnaire'
3517
+ state.recommendQuestion = 0
3518
+ state.recommendCursor = 0
3519
+ state.recommendAnswers = { taskType: null, priority: null, contextBudget: null }
3520
+ state.recommendResults = []
3521
+ state.recommendScrollOffset = 0
3522
+ return
3523
+ }
3524
+ if (key.name === 'up') {
3525
+ const count = state.recommendResults.length
3526
+ if (count === 0) return
3527
+ state.recommendCursor = state.recommendCursor > 0 ? state.recommendCursor - 1 : count - 1
3528
+ return
3529
+ }
3530
+ if (key.name === 'down') {
3531
+ const count = state.recommendResults.length
3532
+ if (count === 0) return
3533
+ state.recommendCursor = state.recommendCursor < count - 1 ? state.recommendCursor + 1 : 0
3534
+ return
3535
+ }
3536
+ if (key.name === 'return') {
3537
+ // 📖 Select the highlighted recommendation — close overlay, jump cursor to it
3538
+ const rec = state.recommendResults[state.recommendCursor]
3539
+ if (rec) {
3540
+ const recKey = toFavoriteKey(rec.result.providerKey, rec.result.modelId)
3541
+ state.recommendOpen = false
3542
+ // 📖 Jump to the recommended model in the main table
3543
+ const idx = state.visibleSorted.findIndex(r => toFavoriteKey(r.providerKey, r.modelId) === recKey)
3544
+ if (idx >= 0) {
3545
+ state.cursor = idx
3546
+ adjustScrollOffset(state)
3547
+ }
3548
+ }
3549
+ return
3550
+ }
3551
+ return // 📖 Swallow all other keys
3552
+ }
3553
+
3554
+ return // 📖 Catch-all swallow
3555
+ }
3556
+
2972
3557
  // ─── Settings overlay keyboard handling ───────────────────────────────────
2973
3558
  if (state.settingsOpen) {
2974
3559
  const providerKeys = Object.keys(sources)
2975
3560
  const telemetryRowIdx = providerKeys.length
2976
3561
  const updateRowIdx = providerKeys.length + 1
3562
+ // 📖 Profile rows start after update row — one row per saved profile
3563
+ const savedProfiles = listProfiles(state.config)
3564
+ const profileStartIdx = updateRowIdx + 1
3565
+ const maxRowIdx = savedProfiles.length > 0 ? profileStartIdx + savedProfiles.length - 1 : updateRowIdx
2977
3566
 
2978
3567
  // 📖 Edit mode: capture typed characters for the API key
2979
3568
  if (state.settingsEditMode) {
@@ -3040,7 +3629,7 @@ async function main() {
3040
3629
  return
3041
3630
  }
3042
3631
 
3043
- if (key.name === 'down' && state.settingsCursor < updateRowIdx) {
3632
+ if (key.name === 'down' && state.settingsCursor < maxRowIdx) {
3044
3633
  state.settingsCursor++
3045
3634
  return
3046
3635
  }
@@ -3053,7 +3642,7 @@ async function main() {
3053
3642
 
3054
3643
  if (key.name === 'pagedown') {
3055
3644
  const pageStep = Math.max(1, (state.terminalRows || 1) - 2)
3056
- state.settingsCursor = Math.min(updateRowIdx, state.settingsCursor + pageStep)
3645
+ state.settingsCursor = Math.min(maxRowIdx, state.settingsCursor + pageStep)
3057
3646
  return
3058
3647
  }
3059
3648
 
@@ -3063,7 +3652,7 @@ async function main() {
3063
3652
  }
3064
3653
 
3065
3654
  if (key.name === 'end') {
3066
- state.settingsCursor = updateRowIdx
3655
+ state.settingsCursor = maxRowIdx
3067
3656
  return
3068
3657
  }
3069
3658
 
@@ -3084,6 +3673,33 @@ async function main() {
3084
3673
  return
3085
3674
  }
3086
3675
 
3676
+ // 📖 Profile row: Enter → load the selected profile (apply its settings live)
3677
+ if (state.settingsCursor >= profileStartIdx && savedProfiles.length > 0) {
3678
+ const profileIdx = state.settingsCursor - profileStartIdx
3679
+ const profileName = savedProfiles[profileIdx]
3680
+ if (profileName) {
3681
+ const settings = loadProfile(state.config, profileName)
3682
+ if (settings) {
3683
+ state.sortColumn = settings.sortColumn || 'avg'
3684
+ state.sortDirection = settings.sortAsc ? 'asc' : 'desc'
3685
+ state.pingInterval = settings.pingInterval || PING_INTERVAL
3686
+ if (settings.tierFilter) {
3687
+ const tierIdx = TIER_CYCLE.indexOf(settings.tierFilter)
3688
+ if (tierIdx >= 0) tierFilterMode = tierIdx
3689
+ } else {
3690
+ tierFilterMode = 0
3691
+ }
3692
+ state.activeProfile = profileName
3693
+ syncFavoriteFlags(state.results, state.config)
3694
+ applyTierFilter()
3695
+ const visible = state.results.filter(r => !r.hidden)
3696
+ state.visibleSorted = sortResultsWithPinnedFavorites(visible, state.sortColumn, state.sortDirection)
3697
+ saveConfig(state.config)
3698
+ }
3699
+ }
3700
+ return
3701
+ }
3702
+
3087
3703
  // 📖 Enter edit mode for the selected provider's key
3088
3704
  const pk = providerKeys[state.settingsCursor]
3089
3705
  state.settingsEditBuffer = state.config.apiKeys?.[pk] ?? ''
@@ -3100,6 +3716,8 @@ async function main() {
3100
3716
  return
3101
3717
  }
3102
3718
  if (state.settingsCursor === updateRowIdx) return
3719
+ // 📖 Profile rows don't respond to Space
3720
+ if (state.settingsCursor >= profileStartIdx) return
3103
3721
 
3104
3722
  // 📖 Toggle enabled/disabled for selected provider
3105
3723
  const pk = providerKeys[state.settingsCursor]
@@ -3112,6 +3730,8 @@ async function main() {
3112
3730
 
3113
3731
  if (key.name === 't') {
3114
3732
  if (state.settingsCursor === telemetryRowIdx || state.settingsCursor === updateRowIdx) return
3733
+ // 📖 Profile rows don't respond to T (test key)
3734
+ if (state.settingsCursor >= profileStartIdx) return
3115
3735
 
3116
3736
  // 📖 Test the selected provider's key (fires a real ping)
3117
3737
  const pk = providerKeys[state.settingsCursor]
@@ -3124,12 +3744,34 @@ async function main() {
3124
3744
  return
3125
3745
  }
3126
3746
 
3747
+ // 📖 Backspace on a profile row → delete that profile
3748
+ if (key.name === 'backspace' && state.settingsCursor >= profileStartIdx && savedProfiles.length > 0) {
3749
+ const profileIdx = state.settingsCursor - profileStartIdx
3750
+ const profileName = savedProfiles[profileIdx]
3751
+ if (profileName) {
3752
+ deleteProfile(state.config, profileName)
3753
+ // 📖 If the deleted profile was active, clear active state
3754
+ if (state.activeProfile === profileName) {
3755
+ setActiveProfile(state.config, null)
3756
+ state.activeProfile = null
3757
+ }
3758
+ saveConfig(state.config)
3759
+ // 📖 Re-clamp cursor after deletion (profile list just got shorter)
3760
+ const newProfiles = listProfiles(state.config)
3761
+ const newMaxRowIdx = newProfiles.length > 0 ? profileStartIdx + newProfiles.length - 1 : updateRowIdx
3762
+ if (state.settingsCursor > newMaxRowIdx) {
3763
+ state.settingsCursor = Math.max(0, newMaxRowIdx)
3764
+ }
3765
+ }
3766
+ return
3767
+ }
3768
+
3127
3769
  if (key.ctrl && key.name === 'c') { exit(0); return }
3128
3770
  return // 📖 Swallow all other keys while settings is open
3129
3771
  }
3130
3772
 
3131
3773
  // 📖 P key: open settings screen
3132
- if (key.name === 'p') {
3774
+ if (key.name === 'p' && !key.shift) {
3133
3775
  state.settingsOpen = true
3134
3776
  state.settingsCursor = 0
3135
3777
  state.settingsEditMode = false
@@ -3138,6 +3780,77 @@ async function main() {
3138
3780
  return
3139
3781
  }
3140
3782
 
3783
+ // 📖 Q key: open Smart Recommend overlay
3784
+ if (key.name === 'q') {
3785
+ state.recommendOpen = true
3786
+ state.recommendPhase = 'questionnaire'
3787
+ state.recommendQuestion = 0
3788
+ state.recommendCursor = 0
3789
+ state.recommendAnswers = { taskType: null, priority: null, contextBudget: null }
3790
+ state.recommendResults = []
3791
+ state.recommendScrollOffset = 0
3792
+ return
3793
+ }
3794
+
3795
+ // 📖 Shift+P: cycle through profiles (or show profile picker)
3796
+ if (key.name === 'p' && key.shift) {
3797
+ const profiles = listProfiles(state.config)
3798
+ if (profiles.length === 0) {
3799
+ // 📖 No profiles saved — save current config as 'default' profile
3800
+ saveAsProfile(state.config, 'default', {
3801
+ tierFilter: TIER_CYCLE[tierFilterMode],
3802
+ sortColumn: state.sortColumn,
3803
+ sortAsc: state.sortDirection === 'asc',
3804
+ pingInterval: state.pingInterval,
3805
+ })
3806
+ setActiveProfile(state.config, 'default')
3807
+ state.activeProfile = 'default'
3808
+ saveConfig(state.config)
3809
+ } else {
3810
+ // 📖 Cycle to next profile (or back to null = raw config)
3811
+ const currentIdx = state.activeProfile ? profiles.indexOf(state.activeProfile) : -1
3812
+ const nextIdx = (currentIdx + 1) % (profiles.length + 1) // +1 for "no profile"
3813
+ if (nextIdx === profiles.length) {
3814
+ // 📖 Back to raw config (no profile)
3815
+ setActiveProfile(state.config, null)
3816
+ state.activeProfile = null
3817
+ saveConfig(state.config)
3818
+ } else {
3819
+ const nextProfile = profiles[nextIdx]
3820
+ const settings = loadProfile(state.config, nextProfile)
3821
+ if (settings) {
3822
+ // 📖 Apply profile's TUI settings to live state
3823
+ state.sortColumn = settings.sortColumn || 'avg'
3824
+ state.sortDirection = settings.sortAsc ? 'asc' : 'desc'
3825
+ state.pingInterval = settings.pingInterval || PING_INTERVAL
3826
+ if (settings.tierFilter) {
3827
+ const tierIdx = TIER_CYCLE.indexOf(settings.tierFilter)
3828
+ if (tierIdx >= 0) tierFilterMode = tierIdx
3829
+ } else {
3830
+ tierFilterMode = 0
3831
+ }
3832
+ state.activeProfile = nextProfile
3833
+ // 📖 Rebuild favorites from profile data
3834
+ syncFavoriteFlags(state.results, state.config)
3835
+ applyTierFilter()
3836
+ const visible = state.results.filter(r => !r.hidden)
3837
+ state.visibleSorted = sortResultsWithPinnedFavorites(visible, state.sortColumn, state.sortDirection)
3838
+ state.cursor = 0
3839
+ state.scrollOffset = 0
3840
+ saveConfig(state.config)
3841
+ }
3842
+ }
3843
+ }
3844
+ return
3845
+ }
3846
+
3847
+ // 📖 Shift+S: enter profile save mode — inline text prompt for typing a profile name
3848
+ if (key.name === 's' && key.shift) {
3849
+ state.profileSaveMode = true
3850
+ state.profileSaveBuffer = ''
3851
+ return
3852
+ }
3853
+
3141
3854
  // 📖 Sorting keys: R=rank, Y=tier, O=origin, M=model, L=latest ping, A=avg ping, S=SWE-bench, C=context, H=health, V=verdict, B=stability, U=uptime
3142
3855
  // 📖 T is reserved for tier filter cycling — tier sort moved to Y
3143
3856
  // 📖 N is now reserved for origin filter cycling
@@ -3146,7 +3859,7 @@ async function main() {
3146
3859
  'l': 'ping', 'a': 'avg', 's': 'swe', 'c': 'ctx', 'h': 'condition', 'v': 'verdict', 'b': 'stability', 'u': 'uptime'
3147
3860
  }
3148
3861
 
3149
- if (sortKeys[key.name] && !key.ctrl) {
3862
+ if (sortKeys[key.name] && !key.ctrl && !key.shift) {
3150
3863
  const col = sortKeys[key.name]
3151
3864
  // 📖 Toggle direction if same column, otherwise reset to asc
3152
3865
  if (state.sortColumn === col) {
@@ -3322,19 +4035,21 @@ async function main() {
3322
4035
 
3323
4036
  process.stdin.on('keypress', onKeyPress)
3324
4037
 
3325
- // 📖 Animation loop: render settings overlay OR main table based on state
4038
+ // 📖 Animation loop: render settings overlay, recommend overlay, help overlay, OR main table
3326
4039
  const ticker = setInterval(() => {
3327
4040
  state.frame++
3328
4041
  // 📖 Cache visible+sorted models each frame so Enter handler always matches the display
3329
- if (!state.settingsOpen) {
4042
+ if (!state.settingsOpen && !state.recommendOpen) {
3330
4043
  const visible = state.results.filter(r => !r.hidden)
3331
4044
  state.visibleSorted = sortResultsWithPinnedFavorites(visible, state.sortColumn, state.sortDirection)
3332
4045
  }
3333
4046
  const content = state.settingsOpen
3334
4047
  ? renderSettings()
3335
- : state.helpVisible
3336
- ? renderHelp()
3337
- : renderTable(state.results, state.pendingPings, state.frame, state.cursor, state.sortColumn, state.sortDirection, state.pingInterval, state.lastPingTime, state.mode, tierFilterMode, state.scrollOffset, state.terminalRows, originFilterMode)
4048
+ : state.recommendOpen
4049
+ ? renderRecommend()
4050
+ : state.helpVisible
4051
+ ? renderHelp()
4052
+ : renderTable(state.results, state.pendingPings, state.frame, state.cursor, state.sortColumn, state.sortDirection, state.pingInterval, state.lastPingTime, state.mode, tierFilterMode, state.scrollOffset, state.terminalRows, originFilterMode, state.activeProfile, state.profileSaveMode, state.profileSaveBuffer)
3338
4053
  process.stdout.write(ALT_HOME + content)
3339
4054
  }, Math.round(1000 / FPS))
3340
4055
 
@@ -3342,7 +4057,19 @@ async function main() {
3342
4057
  const initialVisible = state.results.filter(r => !r.hidden)
3343
4058
  state.visibleSorted = sortResultsWithPinnedFavorites(initialVisible, state.sortColumn, state.sortDirection)
3344
4059
 
3345
- process.stdout.write(ALT_HOME + renderTable(state.results, state.pendingPings, state.frame, state.cursor, state.sortColumn, state.sortDirection, state.pingInterval, state.lastPingTime, state.mode, tierFilterMode, state.scrollOffset, state.terminalRows, originFilterMode))
4060
+ process.stdout.write(ALT_HOME + renderTable(state.results, state.pendingPings, state.frame, state.cursor, state.sortColumn, state.sortDirection, state.pingInterval, state.lastPingTime, state.mode, tierFilterMode, state.scrollOffset, state.terminalRows, originFilterMode, state.activeProfile, state.profileSaveMode, state.profileSaveBuffer))
4061
+
4062
+ // 📖 If --recommend was passed, auto-open the Smart Recommend overlay on start
4063
+ if (cliArgs.recommendMode) {
4064
+ state.recommendOpen = true
4065
+ state.recommendPhase = 'questionnaire'
4066
+ state.recommendCursor = 0
4067
+ state.recommendQuestion = 0
4068
+ state.recommendAnswers = { taskType: null, priority: null, contextBudget: null }
4069
+ state.recommendProgress = 0
4070
+ state.recommendResults = []
4071
+ state.recommendScrollOffset = 0
4072
+ }
3346
4073
 
3347
4074
  // ── Continuous ping loop — ping all models every N seconds forever ──────────
3348
4075