codeblog-app 2.2.6 → 2.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/package.json +9 -7
  2. package/src/ai/__tests__/chat.test.ts +11 -2
  3. package/src/ai/__tests__/compat.test.ts +46 -0
  4. package/src/ai/__tests__/home.ai-stream.integration.test.ts +77 -0
  5. package/src/ai/__tests__/provider-registry.test.ts +61 -0
  6. package/src/ai/__tests__/provider.test.ts +58 -18
  7. package/src/ai/__tests__/stream-events.test.ts +152 -0
  8. package/src/ai/chat.ts +200 -88
  9. package/src/ai/configure.ts +13 -4
  10. package/src/ai/models.ts +26 -0
  11. package/src/ai/provider-registry.ts +150 -0
  12. package/src/ai/provider.ts +99 -137
  13. package/src/ai/stream-events.ts +64 -0
  14. package/src/ai/tools.ts +10 -6
  15. package/src/ai/types.ts +105 -0
  16. package/src/auth/index.ts +3 -1
  17. package/src/auth/oauth.ts +17 -2
  18. package/src/cli/__tests__/commands.test.ts +6 -2
  19. package/src/cli/cmd/ai.ts +10 -0
  20. package/src/cli/cmd/setup.ts +275 -5
  21. package/src/cli/ui.ts +131 -24
  22. package/src/config/index.ts +38 -1
  23. package/src/index.ts +4 -1
  24. package/src/mcp/__tests__/client.test.ts +2 -2
  25. package/src/mcp/__tests__/e2e.ts +10 -6
  26. package/src/mcp/client.ts +33 -63
  27. package/src/storage/chat.ts +3 -1
  28. package/src/tui/__tests__/input-intent.test.ts +27 -0
  29. package/src/tui/__tests__/stream-assembler.test.ts +33 -0
  30. package/src/tui/ai-stream.ts +28 -0
  31. package/src/tui/app.tsx +27 -1
  32. package/src/tui/commands.ts +41 -7
  33. package/src/tui/context/theme.tsx +2 -1
  34. package/src/tui/input-intent.ts +26 -0
  35. package/src/tui/routes/home.tsx +590 -190
  36. package/src/tui/routes/setup.tsx +20 -8
  37. package/src/tui/stream-assembler.ts +49 -0
  38. package/src/util/log.ts +3 -1
  39. package/tsconfig.json +1 -1
@@ -3,6 +3,7 @@ import { Auth } from "../../auth"
3
3
  import { OAuth } from "../../auth/oauth"
4
4
  import { McpBridge } from "../../mcp/client"
5
5
  import { UI } from "../ui"
6
+ import { Config } from "../../config"
6
7
 
7
8
  export let setupCompleted = false
8
9
 
@@ -190,7 +191,7 @@ async function scanAndPublish(): Promise<void> {
190
191
 
191
192
  // ─── AI Configuration ────────────────────────────────────────────────────────
192
193
 
193
- async function aiConfigPrompt(): Promise<void> {
194
+ async function aiQuickConfigPrompt(): Promise<void> {
194
195
  const { AIProvider } = await import("../../ai/provider")
195
196
  const hasKey = await AIProvider.hasAnyKey()
196
197
 
@@ -287,6 +288,271 @@ async function aiConfigPrompt(): Promise<void> {
287
288
  }
288
289
  }
289
290
 
291
+ type WizardMode = "quick" | "manual"
292
+
293
+ interface ProviderChoice {
294
+ name: string
295
+ providerID: string
296
+ api: "anthropic" | "openai" | "google" | "openai-compatible"
297
+ baseURL?: string
298
+ hint?: string
299
+ }
300
+
301
+ const PROVIDER_CHOICES: ProviderChoice[] = [
302
+ { name: "OpenAI", providerID: "openai", api: "openai", baseURL: "https://api.openai.com", hint: "Codex OAuth + API key style" },
303
+ { name: "Anthropic", providerID: "anthropic", api: "anthropic", baseURL: "https://api.anthropic.com", hint: "Claude API key" },
304
+ { name: "Google", providerID: "google", api: "google", baseURL: "https://generativelanguage.googleapis.com", hint: "Gemini API key" },
305
+ { name: "OpenRouter", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://openrouter.ai/api", hint: "OpenAI-compatible" },
306
+ { name: "vLLM", providerID: "openai-compatible", api: "openai-compatible", baseURL: "http://127.0.0.1:8000", hint: "Local/self-hosted OpenAI-compatible" },
307
+ { name: "MiniMax", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://api.minimax.io", hint: "OpenAI-compatible endpoint" },
308
+ { name: "Moonshot AI (Kimi K2.5)", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://api.moonshot.ai", hint: "OpenAI-compatible endpoint" },
309
+ { name: "xAI (Grok)", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://api.x.ai", hint: "OpenAI-compatible endpoint" },
310
+ { name: "Qianfan", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://qianfan.baidubce.com", hint: "OpenAI-compatible endpoint" },
311
+ { name: "Vercel AI Gateway", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://ai-gateway.vercel.sh", hint: "OpenAI-compatible endpoint" },
312
+ { name: "OpenCode Zen", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://opencode.ai/zen", hint: "OpenAI-compatible endpoint" },
313
+ { name: "Xiaomi", providerID: "anthropic", api: "anthropic", baseURL: "https://api.xiaomimimo.com/anthropic", hint: "Anthropic-compatible endpoint" },
314
+ { name: "Synthetic", providerID: "anthropic", api: "anthropic", baseURL: "https://api.synthetic.new", hint: "Anthropic-compatible endpoint" },
315
+ { name: "Together AI", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://api.together.xyz", hint: "OpenAI-compatible endpoint" },
316
+ { name: "Hugging Face", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://router.huggingface.co", hint: "OpenAI-compatible endpoint" },
317
+ { name: "Venice AI", providerID: "openai-compatible", api: "openai-compatible", baseURL: "https://api.venice.ai/api", hint: "OpenAI-compatible endpoint" },
318
+ { name: "LiteLLM", providerID: "openai-compatible", api: "openai-compatible", baseURL: "http://localhost:4000", hint: "Unified OpenAI-compatible gateway" },
319
+ { name: "Cloudflare AI Gateway", providerID: "anthropic", api: "anthropic", hint: "Enter full Anthropic gateway URL manually" },
320
+ { name: "Custom Provider", providerID: "openai-compatible", api: "openai-compatible", hint: "Any OpenAI-compatible URL" },
321
+ ]
322
+
323
+ async function fetchOpenAIModels(baseURL: string, key: string): Promise<string[]> {
324
+ try {
325
+ const clean = baseURL.replace(/\/+$/, "")
326
+ const url = clean.endsWith("/v1") ? `${clean}/models` : `${clean}/v1/models`
327
+ const r = await fetch(url, {
328
+ headers: { Authorization: `Bearer ${key}` },
329
+ signal: AbortSignal.timeout(8000),
330
+ })
331
+ if (!r.ok) return []
332
+ const data = await r.json() as { data?: Array<{ id: string }> }
333
+ return data.data?.map((m) => m.id) || []
334
+ } catch {
335
+ return []
336
+ }
337
+ }
338
+
339
+ function isOfficialOpenAIBase(baseURL: string): boolean {
340
+ try {
341
+ const u = new URL(baseURL)
342
+ return u.hostname === "api.openai.com"
343
+ } catch {
344
+ return false
345
+ }
346
+ }
347
+
348
+ async function verifyEndpoint(choice: ProviderChoice, baseURL: string, key: string): Promise<{ ok: boolean; detail: string; detectedApi?: Config.ModelApi }> {
349
+ try {
350
+ if (choice.api === "anthropic") {
351
+ const clean = baseURL.replace(/\/+$/, "")
352
+ const r = await fetch(`${clean}/v1/messages`, {
353
+ method: "POST",
354
+ headers: { "x-api-key": key, "anthropic-version": "2023-06-01", "content-type": "application/json" },
355
+ body: JSON.stringify({ model: "claude-3-5-haiku-latest", max_tokens: 1, messages: [{ role: "user", content: "ping" }] }),
356
+ signal: AbortSignal.timeout(8000),
357
+ })
358
+ if (r.status !== 404) return { ok: true, detail: `Anthropic endpoint reachable (${r.status})`, detectedApi: "anthropic" }
359
+ return { ok: false, detail: "Anthropic endpoint returned 404" }
360
+ }
361
+
362
+ if (choice.api === "google") {
363
+ const clean = baseURL.replace(/\/+$/, "")
364
+ const r = await fetch(`${clean}/v1beta/models?key=${encodeURIComponent(key)}`, {
365
+ signal: AbortSignal.timeout(8000),
366
+ })
367
+ if (r.ok || r.status === 401 || r.status === 403) return { ok: true, detail: `Google endpoint reachable (${r.status})` }
368
+ return { ok: false, detail: `Google endpoint responded ${r.status}` }
369
+ }
370
+
371
+ const { probe } = await import("../../ai/configure")
372
+ const detected = await probe(baseURL, key)
373
+ if (detected === "anthropic") return { ok: true, detail: "Detected Anthropic API format", detectedApi: "anthropic" }
374
+ if (detected === "openai") {
375
+ const detectedApi: Config.ModelApi =
376
+ choice.providerID === "openai" && isOfficialOpenAIBase(baseURL)
377
+ ? "openai"
378
+ : "openai-compatible"
379
+ return { ok: true, detail: "Detected OpenAI API format", detectedApi }
380
+ }
381
+
382
+ const models = await fetchOpenAIModels(baseURL, key)
383
+ if (models.length > 0) {
384
+ const detectedApi: Config.ModelApi =
385
+ choice.providerID === "openai" && isOfficialOpenAIBase(baseURL)
386
+ ? "openai"
387
+ : "openai-compatible"
388
+ return { ok: true, detail: `Model endpoint reachable (${models.length} models)`, detectedApi }
389
+ }
390
+
391
+ return { ok: false, detail: "Could not detect endpoint format or list models" }
392
+ } catch (err) {
393
+ return { ok: false, detail: err instanceof Error ? err.message : String(err) }
394
+ }
395
+ }
396
+
397
+ async function chooseProvider(): Promise<ProviderChoice | undefined> {
398
+ console.log("")
399
+ console.log(` ${UI.Style.TEXT_NORMAL_BOLD}Model/auth provider${UI.Style.TEXT_NORMAL}`)
400
+ const idx = await UI.select(
401
+ " Choose a provider",
402
+ [...PROVIDER_CHOICES.map((p) => p.hint ? `${p.name} (${p.hint})` : p.name), "Skip for now"],
403
+ )
404
+ if (idx < 0 || idx >= PROVIDER_CHOICES.length) return undefined
405
+ return PROVIDER_CHOICES[idx]
406
+ }
407
+
408
+ async function chooseModel(choice: ProviderChoice, mode: WizardMode, baseURL: string, key: string): Promise<string | undefined> {
409
+ const { AIProvider } = await import("../../ai/provider")
410
+ const builtin = Object.values(AIProvider.BUILTIN_MODELS).filter((m) => m.providerID === choice.providerID).map((m) => m.id)
411
+ const openaiCustom = choice.providerID === "openai" && !isOfficialOpenAIBase(baseURL)
412
+ const useRemote = choice.providerID === "openai-compatible" || openaiCustom
413
+
414
+ if (mode === "quick") {
415
+ if (choice.providerID === "anthropic") return "claude-sonnet-4-20250514"
416
+ if (choice.providerID === "openai" && !openaiCustom) return "gpt-4o-mini"
417
+ if (choice.providerID === "google") return "gemini-2.5-flash"
418
+ const remote = await fetchOpenAIModels(baseURL, key)
419
+ return remote[0] || "gpt-4o-mini"
420
+ }
421
+
422
+ let options = builtin
423
+ if (useRemote) {
424
+ const remote = await fetchOpenAIModels(baseURL, key)
425
+ options = remote
426
+ }
427
+ if (options.length === 0) {
428
+ const typed = await UI.input(` Model ID: `)
429
+ return typed.trim() || "gpt-4o-mini"
430
+ }
431
+
432
+ const idx = await UI.select(" Choose a model", [...options, "Custom model id"])
433
+ if (idx < 0) return undefined
434
+ if (idx >= options.length) {
435
+ const typed = await UI.input(` Model ID: `)
436
+ return typed.trim() || options[0]!
437
+ }
438
+ return options[idx]!
439
+ }
440
+
441
+ export async function runAISetupWizard(source: "setup" | "command" = "command"): Promise<void> {
442
+ const { AIProvider } = await import("../../ai/provider")
443
+ const hasKey = await AIProvider.hasAnyKey()
444
+
445
+ UI.divider()
446
+ if (source === "setup") {
447
+ await UI.typeText("AI onboarding")
448
+ } else {
449
+ await UI.typeText("CodeBlog AI setup wizard")
450
+ }
451
+
452
+ if (hasKey) {
453
+ const keep = await UI.waitEnter("AI is already configured. Press Enter to reconfigure, or Esc to keep current config")
454
+ if (keep === "escape") return
455
+ }
456
+
457
+ console.log("")
458
+ const modeIdx = await UI.select(" Onboarding mode", ["QuickStart (recommended)", "Manual", "Skip for now"])
459
+ if (modeIdx < 0 || modeIdx === 2) {
460
+ UI.info("Skipped AI setup.")
461
+ return
462
+ }
463
+ const mode = modeIdx === 0 ? "quick" : "manual"
464
+
465
+ const provider = await chooseProvider()
466
+ if (!provider) {
467
+ UI.info("Skipped AI setup.")
468
+ return
469
+ }
470
+ if (provider.hint) UI.info(`${provider.name}: ${provider.hint}`)
471
+
472
+ const defaultBaseURL = provider.baseURL || ""
473
+ const needsBasePrompt =
474
+ mode === "manual" ||
475
+ provider.providerID === "openai-compatible" ||
476
+ provider.providerID === "openai" ||
477
+ !defaultBaseURL
478
+ let baseURL = defaultBaseURL
479
+
480
+ if (needsBasePrompt) {
481
+ const endpointHint = defaultBaseURL ? ` [${defaultBaseURL}]` : ""
482
+ const entered = await UI.inputWithEscape(` Endpoint base URL${endpointHint}: `)
483
+ if (entered === null) {
484
+ UI.info("Skipped AI setup.")
485
+ return
486
+ }
487
+ baseURL = entered.trim() || defaultBaseURL
488
+ }
489
+
490
+ const keyRaw = await UI.inputWithEscape(` API key / Bearer token: `)
491
+ if (keyRaw === null) {
492
+ UI.info("Skipped AI setup.")
493
+ return
494
+ }
495
+ const key = keyRaw.trim()
496
+ if (!key || key.length < 5) {
497
+ UI.warn("Credential seems invalid, setup skipped.")
498
+ return
499
+ }
500
+
501
+ if (!baseURL) {
502
+ UI.warn("Endpoint URL is required for this provider.")
503
+ return
504
+ }
505
+
506
+ let verified = false
507
+ let detectedApi: Config.ModelApi | undefined
508
+
509
+ while (!verified) {
510
+ await shimmerLine("Verifying endpoint...", 900)
511
+ const verify = await verifyEndpoint(provider, baseURL, key)
512
+ detectedApi = verify.detectedApi
513
+ if (verify.ok) {
514
+ UI.success(verify.detail)
515
+ verified = true
516
+ break
517
+ }
518
+ UI.warn(`Endpoint verification failed: ${verify.detail}`)
519
+ const retry = await UI.waitEnter("Press Enter to retry verification, or Esc to continue anyway")
520
+ if (retry === "escape") break
521
+ }
522
+
523
+ const selectedModel = await chooseModel(provider, mode, baseURL, key)
524
+ if (!selectedModel) {
525
+ UI.info("Skipped AI setup.")
526
+ return
527
+ }
528
+ const cfg = await Config.load()
529
+ const providers = cfg.providers || {}
530
+ const resolvedApi = detectedApi || provider.api
531
+ const resolvedCompat = provider.providerID === "openai-compatible" && resolvedApi === "openai"
532
+ ? "openai-compatible"
533
+ : resolvedApi
534
+ const providerConfig: Config.ProviderConfig = {
535
+ api_key: key,
536
+ api: resolvedApi,
537
+ compat_profile: resolvedCompat,
538
+ }
539
+ if (baseURL) providerConfig.base_url = baseURL
540
+ providers[provider.providerID] = providerConfig
541
+
542
+ const model = provider.providerID === "openai-compatible" && !selectedModel.includes("/")
543
+ ? `openai-compatible/${selectedModel}`
544
+ : selectedModel
545
+
546
+ await Config.save({
547
+ providers,
548
+ default_provider: provider.providerID,
549
+ model,
550
+ })
551
+
552
+ UI.success(`AI configured: ${provider.name} (${model})`)
553
+ console.log(` ${UI.Style.TEXT_DIM}You can rerun this wizard with: codeblog ai setup${UI.Style.TEXT_NORMAL}`)
554
+ }
555
+
290
556
  // ─── Setup Command ───────────────────────────────────────────────────────────
291
557
 
292
558
  export const SetupCommand: CommandModule = {
@@ -325,7 +591,14 @@ export const SetupCommand: CommandModule = {
325
591
  const token = await Auth.get()
326
592
  UI.success(`Authenticated as ${token?.username || "user"}!`)
327
593
 
328
- // Phase 3: Interactive scan & publish
594
+ // Phase 3: AI configuration (OpenClaw-like provider chooser)
595
+ UI.divider()
596
+ await UI.typeText("Let's connect your AI provider first.", { charDelay: 10 })
597
+ await UI.typeText("Choose a provider, enter key/endpoint, and we'll verify it.", { charDelay: 10 })
598
+ console.log("")
599
+ await runAISetupWizard("setup")
600
+
601
+ // Phase 4: Interactive scan & publish
329
602
  UI.divider()
330
603
 
331
604
  await UI.typeText("Great! Let's see what you've been working on.")
@@ -340,9 +613,6 @@ export const SetupCommand: CommandModule = {
340
613
  await UI.typeText("Skipped. You can scan and publish later in the app.")
341
614
  }
342
615
 
343
- // Phase 4: AI configuration
344
- await aiConfigPrompt()
345
-
346
616
  // Phase 5: Transition to TUI
347
617
  UI.divider()
348
618
  setupCompleted = true
package/src/cli/ui.ts CHANGED
@@ -84,31 +84,70 @@ export namespace UI {
84
84
  if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(true)
85
85
 
86
86
  let buf = ""
87
- const onData = (ch: Buffer) => {
87
+ let paste = false
88
+ let onData: ((ch: Buffer) => void) = () => {}
89
+
90
+ const restore = () => {
91
+ if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(wasRaw ?? false)
92
+ stdin.removeListener("data", onData)
93
+ }
94
+
95
+ const readClipboard = () => {
96
+ if (process.platform !== "darwin") return ""
97
+ try {
98
+ const out = Bun.spawnSync(["pbpaste"])
99
+ if (out.exitCode !== 0) return ""
100
+ return out.stdout.toString().replace(/\r\n/g, "\n").replace(/\r/g, "\n").replace(/\n/g, "")
101
+ } catch {
102
+ return ""
103
+ }
104
+ }
105
+
106
+ const append = (text: string) => {
107
+ if (!text) return
108
+ buf += text
109
+ process.stderr.write(text)
110
+ }
111
+
112
+ onData = (ch: Buffer) => {
88
113
  const c = ch.toString("utf8")
89
114
  if (c === "\u0003") {
90
115
  // Ctrl+C
91
- if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(wasRaw ?? false)
92
- stdin.removeListener("data", onData)
116
+ restore()
93
117
  process.exit(130)
94
118
  }
95
- if (c === "\x1b") {
119
+ if (!paste && c === "\x1b") {
96
120
  // Escape
97
- if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(wasRaw ?? false)
98
- stdin.removeListener("data", onData)
121
+ restore()
99
122
  process.stderr.write("\n")
100
123
  resolve(null)
101
124
  return
102
125
  }
103
- if (c === "\r" || c === "\n") {
126
+ if (c === "\x16" || c === "\x1bv") {
127
+ const clip = readClipboard()
128
+ if (clip) append(clip)
129
+ return
130
+ }
131
+
132
+ let text = c
133
+ if (text.includes("\x1b[200~")) {
134
+ paste = true
135
+ text = text.replace(/\x1b\[200~/g, "")
136
+ }
137
+ if (text.includes("\x1b[201~")) {
138
+ paste = false
139
+ text = text.replace(/\x1b\[201~/g, "")
140
+ }
141
+ text = text.replace(/\x1b\[[0-9;?]*[ -/]*[@-~]/g, "").replace(/\x1b./g, "")
142
+
143
+ if (!paste && (text === "\r" || text === "\n")) {
104
144
  // Enter
105
- if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(wasRaw ?? false)
106
- stdin.removeListener("data", onData)
145
+ restore()
107
146
  process.stderr.write("\n")
108
147
  resolve(buf)
109
148
  return
110
149
  }
111
- if (c === "\u007f" || c === "\b") {
150
+ if (!paste && (text === "\u007f" || text === "\b")) {
112
151
  // Backspace
113
152
  if (buf.length > 0) {
114
153
  buf = buf.slice(0, -1)
@@ -117,11 +156,8 @@ export namespace UI {
117
156
  return
118
157
  }
119
158
  // Regular character
120
- const clean = c.replace(/[\x00-\x1f\x7f]/g, "")
121
- if (clean) {
122
- buf += clean
123
- process.stderr.write(clean)
124
- }
159
+ const clean = text.replace(/[\x00-\x08\x0b-\x1f\x7f]/g, "")
160
+ append(clean)
125
161
  }
126
162
  stdin.on("data", onData)
127
163
  })
@@ -165,16 +201,87 @@ export namespace UI {
165
201
  }
166
202
 
167
203
  export async function select(prompt: string, options: string[]): Promise<number> {
168
- console.log(prompt)
169
- for (let i = 0; i < options.length; i++) {
170
- console.log(` ${Style.TEXT_HIGHLIGHT}[${i + 1}]${Style.TEXT_NORMAL} ${options[i]}`)
204
+ if (options.length === 0) return 0
205
+
206
+ const stdin = process.stdin
207
+ const wasRaw = stdin.isRaw
208
+ if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(true)
209
+ process.stderr.write("\x1b[?25l")
210
+
211
+ let idx = 0
212
+ let drawnRows = 0
213
+ const maxRows = 12
214
+ let onData: ((ch: Buffer) => void) = () => {}
215
+
216
+ const stripAnsi = (text: string) => text.replace(/\x1b\[[0-9;?]*[ -/]*[@-~]/g, "").replace(/\x1b./g, "")
217
+ const rowCount = (line: string) => {
218
+ const cols = Math.max(20, process.stderr.columns || 80)
219
+ const len = Array.from(stripAnsi(line)).length
220
+ return Math.max(1, Math.ceil((len || 1) / cols))
221
+ }
222
+
223
+ const draw = () => {
224
+ if (drawnRows > 1) process.stderr.write(`\x1b[${drawnRows - 1}F`)
225
+ process.stderr.write("\x1b[J")
226
+
227
+ const start = options.length <= maxRows ? 0 : Math.max(0, Math.min(idx - 4, options.length - maxRows))
228
+ const items = options.slice(start, start + maxRows)
229
+ const lines = [
230
+ prompt,
231
+ ...items.map((item, i) => {
232
+ const active = start + i === idx
233
+ const marker = active ? `${Style.TEXT_HIGHLIGHT}●${Style.TEXT_NORMAL}` : "○"
234
+ const text = active ? `${Style.TEXT_NORMAL_BOLD}${item}${Style.TEXT_NORMAL}` : item
235
+ return ` ${marker} ${text}`
236
+ }),
237
+ options.length > maxRows
238
+ ? ` ${Style.TEXT_DIM}${start > 0 ? "↑ more " : ""}${start + maxRows < options.length ? "↓ more" : ""}${Style.TEXT_NORMAL}`
239
+ : ` ${Style.TEXT_DIM}${Style.TEXT_NORMAL}`,
240
+ ` ${Style.TEXT_DIM}Use ↑/↓ then Enter (Esc to cancel)${Style.TEXT_NORMAL}`,
241
+ ]
242
+ process.stderr.write(lines.map((line) => `\x1b[2K\r${line}`).join("\n"))
243
+ drawnRows = lines.reduce((sum, line) => sum + rowCount(line), 0)
171
244
  }
172
- console.log("")
173
- const answer = await input(` Choice [1]: `)
174
- const num = parseInt(answer, 10)
175
- if (!answer) return 0
176
- if (isNaN(num) || num < 1 || num > options.length) return 0
177
- return num - 1
245
+
246
+ const restore = () => {
247
+ process.stderr.write("\x1b[?25h")
248
+ if (stdin.isTTY && stdin.setRawMode) stdin.setRawMode(wasRaw ?? false)
249
+ stdin.removeListener("data", onData)
250
+ process.stderr.write("\n")
251
+ }
252
+
253
+ draw()
254
+
255
+ return new Promise((resolve) => {
256
+ onData = (ch: Buffer) => {
257
+ const c = ch.toString("utf8")
258
+ if (c === "\u0003") {
259
+ restore()
260
+ process.exit(130)
261
+ }
262
+ if (c === "\r" || c === "\n") {
263
+ restore()
264
+ resolve(idx)
265
+ return
266
+ }
267
+ if (c === "\x1b") {
268
+ restore()
269
+ resolve(-1)
270
+ return
271
+ }
272
+ if (c === "k" || c.includes("\x1b[A") || c.includes("\x1bOA")) {
273
+ idx = (idx - 1 + options.length) % options.length
274
+ draw()
275
+ return
276
+ }
277
+ if (c === "j" || c.includes("\x1b[B") || c.includes("\x1bOB")) {
278
+ idx = (idx + 1) % options.length
279
+ draw()
280
+ return
281
+ }
282
+ }
283
+ stdin.on("data", onData)
284
+ })
178
285
  }
179
286
 
180
287
  export async function waitKey(prompt: string, keys: string[]): Promise<string> {
@@ -1,12 +1,23 @@
1
1
  import path from "path"
2
+ import { chmod, writeFile } from "fs/promises"
2
3
  import { Global } from "../global"
3
4
 
4
5
  const CONFIG_FILE = path.join(Global.Path.config, "config.json")
5
6
 
6
7
  export namespace Config {
8
+ export type ModelApi = "anthropic" | "openai" | "google" | "openai-compatible"
9
+ export type CompatProfile = "anthropic" | "openai" | "openai-compatible" | "google"
10
+
11
+ export interface FeatureFlags {
12
+ ai_provider_registry_v2?: boolean
13
+ ai_onboarding_wizard_v2?: boolean
14
+ }
15
+
7
16
  export interface ProviderConfig {
8
17
  api_key: string
9
18
  base_url?: string
19
+ api?: ModelApi
20
+ compat_profile?: CompatProfile
10
21
  }
11
22
 
12
23
  export interface CodeblogConfig {
@@ -14,9 +25,11 @@ export namespace Config {
14
25
  api_key?: string
15
26
  token?: string
16
27
  model?: string
28
+ default_provider?: string
17
29
  default_language?: string
18
30
  activeAgent?: string
19
31
  providers?: Record<string, ProviderConfig>
32
+ feature_flags?: FeatureFlags
20
33
  }
21
34
 
22
35
  const defaults: CodeblogConfig = {
@@ -25,6 +38,11 @@ export namespace Config {
25
38
 
26
39
  export const filepath = CONFIG_FILE
27
40
 
41
+ const FEATURE_FLAG_ENV: Record<keyof FeatureFlags, string> = {
42
+ ai_provider_registry_v2: "CODEBLOG_AI_PROVIDER_REGISTRY_V2",
43
+ ai_onboarding_wizard_v2: "CODEBLOG_AI_ONBOARDING_WIZARD_V2",
44
+ }
45
+
28
46
  export async function load(): Promise<CodeblogConfig> {
29
47
  const file = Bun.file(CONFIG_FILE)
30
48
  const data = await file.json().catch(() => ({}))
@@ -34,7 +52,8 @@ export namespace Config {
34
52
  export async function save(config: Partial<CodeblogConfig>) {
35
53
  const current = await load()
36
54
  const merged = { ...current, ...config }
37
- await Bun.write(Bun.file(CONFIG_FILE, { mode: 0o600 }), JSON.stringify(merged, null, 2))
55
+ await writeFile(CONFIG_FILE, JSON.stringify(merged, null, 2))
56
+ await chmod(CONFIG_FILE, 0o600).catch(() => {})
38
57
  }
39
58
 
40
59
  export async function url() {
@@ -52,4 +71,22 @@ export namespace Config {
52
71
  export async function language() {
53
72
  return process.env.CODEBLOG_LANGUAGE || (await load()).default_language
54
73
  }
74
+
75
+ function parseBool(raw: string | undefined): boolean | undefined {
76
+ if (!raw) return undefined
77
+ const v = raw.trim().toLowerCase()
78
+ if (["1", "true", "yes", "on"].includes(v)) return true
79
+ if (["0", "false", "no", "off"].includes(v)) return false
80
+ return undefined
81
+ }
82
+
83
+ export function envFlagName(flag: keyof FeatureFlags): string {
84
+ return FEATURE_FLAG_ENV[flag]
85
+ }
86
+
87
+ export async function featureEnabled(flag: keyof FeatureFlags): Promise<boolean> {
88
+ const env = parseBool(process.env[FEATURE_FLAG_ENV[flag]])
89
+ if (env !== undefined) return env
90
+ return !!(await load()).feature_flags?.[flag]
91
+ }
55
92
  }
package/src/index.ts CHANGED
@@ -8,6 +8,7 @@ import { Auth } from "./auth"
8
8
 
9
9
  // Commands
10
10
  import { SetupCommand } from "./cli/cmd/setup"
11
+ import { AISetupCommand } from "./cli/cmd/ai"
11
12
  import { LoginCommand } from "./cli/cmd/login"
12
13
  import { LogoutCommand } from "./cli/cmd/logout"
13
14
  import { WhoamiCommand } from "./cli/cmd/whoami"
@@ -71,7 +72,7 @@ const cli = yargs(hideBin(process.argv))
71
72
  })
72
73
  .middleware(async (argv) => {
73
74
  const cmd = argv._[0] as string | undefined
74
- const skipAuth = ["setup", "login", "logout", "config", "update", "uninstall"]
75
+ const skipAuth = ["setup", "ai", "login", "logout", "config", "update", "uninstall"]
75
76
  if (cmd && !skipAuth.includes(cmd)) {
76
77
  const authed = await Auth.authenticated()
77
78
  if (!authed) {
@@ -84,6 +85,7 @@ const cli = yargs(hideBin(process.argv))
84
85
  "\n" + UI.logo() +
85
86
  "\n Getting Started:\n" +
86
87
  " setup First-time setup wizard\n" +
88
+ " ai setup Full AI onboarding wizard\n" +
87
89
  " login / logout Authentication\n\n" +
88
90
  " Browse & Interact:\n" +
89
91
  " feed Browse the forum feed\n" +
@@ -111,6 +113,7 @@ const cli = yargs(hideBin(process.argv))
111
113
  // Register commands with describe=false to hide from auto-generated Commands section
112
114
  // (we already display them in the custom usage above)
113
115
  .command({ ...SetupCommand, describe: false })
116
+ .command({ ...AISetupCommand, describe: false })
114
117
  .command({ ...LoginCommand, describe: false })
115
118
  .command({ ...LogoutCommand, describe: false })
116
119
  .command({ ...FeedCommand, describe: false })
@@ -6,7 +6,7 @@ import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test"
6
6
  // ---------------------------------------------------------------------------
7
7
 
8
8
  // Mock the MCP SDK
9
- const mockCallTool = mock(() =>
9
+ const mockCallTool = mock((): Promise<{ content: Array<{ type: string; text?: string; data?: string }>; isError: boolean }> =>
10
10
  Promise.resolve({
11
11
  content: [{ type: "text", text: '{"ok":true}' }],
12
12
  isError: false,
@@ -120,7 +120,7 @@ describe("McpBridge", () => {
120
120
  test("listTools delegates to MCP client", async () => {
121
121
  const result = await McpBridge.listTools()
122
122
  expect(result.tools).toHaveLength(1)
123
- expect(result.tools[0].name).toBe("test_tool")
123
+ expect(result.tools[0]?.name).toBe("test_tool")
124
124
  })
125
125
 
126
126
  test("disconnect cleans up transport and client", async () => {