codeblog-app 2.1.2 → 2.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/drizzle/0000_init.sql +34 -0
- package/drizzle/meta/_journal.json +13 -0
- package/drizzle.config.ts +10 -0
- package/package.json +71 -8
- package/src/ai/__tests__/chat.test.ts +110 -0
- package/src/ai/__tests__/provider.test.ts +184 -0
- package/src/ai/__tests__/tools.test.ts +54 -0
- package/src/ai/chat.ts +170 -0
- package/src/ai/configure.ts +134 -0
- package/src/ai/provider.ts +238 -0
- package/src/ai/tools.ts +91 -0
- package/src/auth/index.ts +47 -0
- package/src/auth/oauth.ts +94 -0
- package/src/cli/__tests__/commands.test.ts +225 -0
- package/src/cli/cmd/agent.ts +97 -0
- package/src/cli/cmd/chat.ts +190 -0
- package/src/cli/cmd/comment.ts +67 -0
- package/src/cli/cmd/config.ts +153 -0
- package/src/cli/cmd/feed.ts +53 -0
- package/src/cli/cmd/forum.ts +106 -0
- package/src/cli/cmd/login.ts +45 -0
- package/src/cli/cmd/logout.ts +12 -0
- package/src/cli/cmd/me.ts +188 -0
- package/src/cli/cmd/post.ts +25 -0
- package/src/cli/cmd/publish.ts +64 -0
- package/src/cli/cmd/scan.ts +78 -0
- package/src/cli/cmd/search.ts +35 -0
- package/src/cli/cmd/setup.ts +273 -0
- package/src/cli/cmd/tui.ts +20 -0
- package/src/cli/cmd/uninstall.ts +156 -0
- package/src/cli/cmd/update.ts +123 -0
- package/src/cli/cmd/vote.ts +50 -0
- package/src/cli/cmd/whoami.ts +18 -0
- package/src/cli/mcp-print.ts +6 -0
- package/src/cli/ui.ts +195 -0
- package/src/config/index.ts +54 -0
- package/src/flag/index.ts +23 -0
- package/src/global/index.ts +38 -0
- package/src/id/index.ts +20 -0
- package/src/index.ts +200 -0
- package/src/mcp/__tests__/client.test.ts +149 -0
- package/src/mcp/__tests__/e2e.ts +327 -0
- package/src/mcp/__tests__/integration.ts +148 -0
- package/src/mcp/client.ts +148 -0
- package/src/server/index.ts +48 -0
- package/src/storage/chat.ts +71 -0
- package/src/storage/db.ts +85 -0
- package/src/storage/schema.sql.ts +39 -0
- package/src/storage/schema.ts +1 -0
- package/src/tui/app.tsx +179 -0
- package/src/tui/commands.ts +187 -0
- package/src/tui/context/exit.tsx +15 -0
- package/src/tui/context/helper.tsx +25 -0
- package/src/tui/context/route.tsx +24 -0
- package/src/tui/context/theme.tsx +470 -0
- package/src/tui/routes/home.tsx +508 -0
- package/src/tui/routes/model.tsx +207 -0
- package/src/tui/routes/notifications.tsx +87 -0
- package/src/tui/routes/post.tsx +102 -0
- package/src/tui/routes/search.tsx +105 -0
- package/src/tui/routes/setup.tsx +255 -0
- package/src/tui/routes/trending.tsx +107 -0
- package/src/util/__tests__/context.test.ts +31 -0
- package/src/util/__tests__/lazy.test.ts +37 -0
- package/src/util/context.ts +23 -0
- package/src/util/error.ts +46 -0
- package/src/util/lazy.ts +18 -0
- package/src/util/log.ts +142 -0
- package/tsconfig.json +11 -0
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
// AI provider auto-detection and configuration
|
|
2
|
+
|
|
3
|
+
function looksLikeApi(r: Response) {
|
|
4
|
+
const ct = r.headers.get("content-type") || ""
|
|
5
|
+
return ct.includes("json") || ct.includes("text/plain")
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
/**
 * Probe a custom base URL to determine which wire format it speaks.
 *
 * Tries the OpenAI-style `GET /v1/models` endpoint first, then the
 * Anthropic-style `POST /v1/messages` endpoint. Returns "openai",
 * "anthropic", or null when neither responds like an API.
 */
export async function probe(base: string, key: string): Promise<"openai" | "anthropic" | null> {
  // Normalize: drop trailing slashes so path joining is predictable.
  const clean = base.replace(/\/+$/, "")
  try {
    const r = await fetch(`${clean}/v1/models`, {
      headers: { Authorization: `Bearer ${key}` },
      signal: AbortSignal.timeout(8000), // don't hang setup on a dead host
    })
    // 401/403 still proves an OpenAI-style endpoint exists — the key may just be wrong.
    if (r.ok || ((r.status === 401 || r.status === 403) && looksLikeApi(r))) return "openai"
  } catch {} // network/timeout error: fall through and try the Anthropic shape
  try {
    // Minimal, intentionally-invalid request: we only care how the server
    // answers, not about getting a real completion back.
    const r = await fetch(`${clean}/v1/messages`, {
      method: "POST",
      headers: { "x-api-key": key, "anthropic-version": "2023-06-01", "content-type": "application/json" },
      body: JSON.stringify({ model: "test", max_tokens: 1, messages: [] }),
      signal: AbortSignal.timeout(8000),
    })
    // Any non-404 API-ish response (even a validation error) means the route exists.
    if (r.status !== 404 && looksLikeApi(r)) return "anthropic"
  } catch {}
  return null
}
|
|
28
|
+
|
|
29
|
+
/**
 * Well-known API-key prefixes → provider id. Keys matching none of these
 * are treated as OpenAI keys (see detectProvider).
 */
const KEY_PREFIX_MAP: Record<string, string> = {
  "sk-ant-": "anthropic",
  "AIza": "google",
  "xai-": "xai",
  "gsk_": "groq",
  "sk-or-": "openrouter",
  "pplx-": "perplexity",
}
|
|
37
|
+
|
|
38
|
+
/** Provider id → environment variable used to export that provider's API key. */
const ENV_MAP: Record<string, string> = {
  anthropic: "ANTHROPIC_API_KEY",
  openai: "OPENAI_API_KEY",
  google: "GOOGLE_GENERATIVE_AI_API_KEY",
  xai: "XAI_API_KEY",
  groq: "GROQ_API_KEY",
  openrouter: "OPENROUTER_API_KEY",
  perplexity: "PERPLEXITY_API_KEY",
  "openai-compatible": "OPENAI_COMPATIBLE_API_KEY",
}
|
|
48
|
+
|
|
49
|
+
async function fetchFirstModel(base: string, key: string): Promise<string | null> {
|
|
50
|
+
try {
|
|
51
|
+
const clean = base.replace(/\/+$/, "")
|
|
52
|
+
const r = await fetch(`${clean}/v1/models`, {
|
|
53
|
+
headers: { Authorization: `Bearer ${key}` },
|
|
54
|
+
signal: AbortSignal.timeout(8000),
|
|
55
|
+
})
|
|
56
|
+
if (!r.ok) return null
|
|
57
|
+
const data = await r.json() as { data?: Array<{ id: string }> }
|
|
58
|
+
if (!data.data || data.data.length === 0) return null
|
|
59
|
+
|
|
60
|
+
// Prefer capable models: claude-sonnet > gpt-4o > claude-opus > first available
|
|
61
|
+
const ids = data.data.map((m) => m.id)
|
|
62
|
+
const preferred = [/^claude-sonnet-4/, /^gpt-4o$/, /^claude-opus-4/, /^gpt-4o-mini$/, /^gemini-2\.5-flash$/]
|
|
63
|
+
for (const pattern of preferred) {
|
|
64
|
+
const match = ids.find((id) => pattern.test(id))
|
|
65
|
+
if (match) return match
|
|
66
|
+
}
|
|
67
|
+
return ids[0] ?? null
|
|
68
|
+
} catch {}
|
|
69
|
+
return null
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
export function detectProvider(key: string) {
|
|
73
|
+
for (const [prefix, provider] of Object.entries(KEY_PREFIX_MAP)) {
|
|
74
|
+
if (key.startsWith(prefix)) return provider
|
|
75
|
+
}
|
|
76
|
+
return "openai"
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
 * Persist provider credentials (and optionally a custom base URL) to config,
 * and export them into process.env so the current process can use them
 * immediately without a reload.
 *
 * With a `url`: probes the endpoint to decide between the Anthropic and
 * OpenAI wire formats. Without one: infers the provider from the key prefix.
 * Returns the resolved provider label, or an `error` message on probe failure.
 */
export async function saveProvider(url: string, key: string): Promise<{ provider: string; error?: string }> {
  const { Config } = await import("../config") // lazy import keeps startup cheap

  if (url) {
    const detected = await probe(url, key)
    if (!detected) return { provider: "", error: "Could not connect. Check URL and key." }

    // Custom URLs map onto one of the two bundled SDK flavors.
    const provider = detected === "anthropic" ? "anthropic" : "openai-compatible"
    const envKey = detected === "anthropic" ? "ANTHROPIC_API_KEY" : "OPENAI_COMPATIBLE_API_KEY"
    const envBase = detected === "anthropic" ? "ANTHROPIC_BASE_URL" : "OPENAI_COMPATIBLE_BASE_URL"
    // Make the credentials visible to this process right away.
    process.env[envKey] = key
    process.env[envBase] = url

    const cfg = await Config.load()
    const providers = cfg.providers || {}
    // NOTE(review): this replaces any existing entry for `provider` wholesale —
    // presumably api_key/base_url are its only fields; confirm against Config schema.
    providers[provider] = { api_key: key, base_url: url }

    // Auto-set model if not already configured
    const update: Record<string, unknown> = { providers }
    if (!cfg.model) {
      if (detected === "anthropic") {
        update.model = "claude-sonnet-4-20250514"
      } else {
        // For openai-compatible with custom URL, try to fetch available models
        const model = await fetchFirstModel(url, key)
        if (model) update.model = `openai-compatible/${model}`
      }
    }

    await Config.save(update)
    return { provider: `${detected} format` }
  }

  // No URL: classify the key by prefix and store it under that provider.
  const provider = detectProvider(key)
  if (ENV_MAP[provider]) process.env[ENV_MAP[provider]] = key

  const cfg = await Config.load()
  const providers = cfg.providers || {}
  providers[provider] = { api_key: key }

  // Auto-set model for known providers
  const update: Record<string, unknown> = { providers }
  if (!cfg.model) {
    const { AIProvider } = await import("./provider")
    const models = Object.values(AIProvider.BUILTIN_MODELS).filter((m) => m.providerID === provider)
    if (models.length > 0) update.model = models[0]!.id
  }

  await Config.save(update)
  return { provider }
}
|
|
130
|
+
|
|
131
|
+
export function mask(s: string) {
|
|
132
|
+
if (s.length <= 8) return s
|
|
133
|
+
return s.slice(0, 4) + "\u2022".repeat(Math.min(s.length - 8, 20)) + s.slice(-4)
|
|
134
|
+
}
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
import { createAnthropic } from "@ai-sdk/anthropic"
|
|
2
|
+
import { createOpenAI } from "@ai-sdk/openai"
|
|
3
|
+
import { createGoogleGenerativeAI } from "@ai-sdk/google"
|
|
4
|
+
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
|
|
5
|
+
import { type LanguageModel, type Provider as SDK } from "ai"
|
|
6
|
+
import { Config } from "../config"
|
|
7
|
+
import { Log } from "../util/log"
|
|
8
|
+
|
|
9
|
+
const log = Log.create({ service: "ai-provider" })
|
|
10
|
+
|
|
11
|
+
export namespace AIProvider {
|
|
12
|
+
// ---------------------------------------------------------------------------
|
|
13
|
+
// Bundled providers (4 core)
|
|
14
|
+
// ---------------------------------------------------------------------------
|
|
15
|
+
const BUNDLED_PROVIDERS: Record<string, (options: any) => SDK> = {
|
|
16
|
+
"@ai-sdk/anthropic": createAnthropic as any,
|
|
17
|
+
"@ai-sdk/openai": createOpenAI as any,
|
|
18
|
+
"@ai-sdk/google": createGoogleGenerativeAI as any,
|
|
19
|
+
"@ai-sdk/openai-compatible": createOpenAICompatible as any,
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// ---------------------------------------------------------------------------
|
|
23
|
+
// Provider env key mapping
|
|
24
|
+
// ---------------------------------------------------------------------------
|
|
25
|
+
const PROVIDER_ENV: Record<string, string[]> = {
|
|
26
|
+
anthropic: ["ANTHROPIC_API_KEY", "ANTHROPIC_AUTH_TOKEN"],
|
|
27
|
+
openai: ["OPENAI_API_KEY"],
|
|
28
|
+
google: ["GOOGLE_GENERATIVE_AI_API_KEY", "GOOGLE_API_KEY"],
|
|
29
|
+
"openai-compatible": ["OPENAI_COMPATIBLE_API_KEY"],
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// ---------------------------------------------------------------------------
|
|
33
|
+
// Provider base URL env mapping
|
|
34
|
+
// ---------------------------------------------------------------------------
|
|
35
|
+
const PROVIDER_BASE_URL_ENV: Record<string, string[]> = {
|
|
36
|
+
anthropic: ["ANTHROPIC_BASE_URL"],
|
|
37
|
+
openai: ["OPENAI_BASE_URL", "OPENAI_API_BASE"],
|
|
38
|
+
google: ["GOOGLE_API_BASE_URL"],
|
|
39
|
+
"openai-compatible": ["OPENAI_COMPATIBLE_BASE_URL"],
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// ---------------------------------------------------------------------------
|
|
43
|
+
// Provider → npm package mapping
|
|
44
|
+
// ---------------------------------------------------------------------------
|
|
45
|
+
const PROVIDER_NPM: Record<string, string> = {
|
|
46
|
+
anthropic: "@ai-sdk/anthropic",
|
|
47
|
+
openai: "@ai-sdk/openai",
|
|
48
|
+
google: "@ai-sdk/google",
|
|
49
|
+
"openai-compatible": "@ai-sdk/openai-compatible",
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// ---------------------------------------------------------------------------
|
|
53
|
+
// Model info type
|
|
54
|
+
// ---------------------------------------------------------------------------
|
|
55
|
+
export interface ModelInfo {
|
|
56
|
+
id: string
|
|
57
|
+
providerID: string
|
|
58
|
+
name: string
|
|
59
|
+
contextWindow: number
|
|
60
|
+
outputTokens: number
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// ---------------------------------------------------------------------------
|
|
64
|
+
// Built-in model list
|
|
65
|
+
// ---------------------------------------------------------------------------
|
|
66
|
+
export const BUILTIN_MODELS: Record<string, ModelInfo> = {
|
|
67
|
+
"claude-sonnet-4-20250514": { id: "claude-sonnet-4-20250514", providerID: "anthropic", name: "Claude Sonnet 4", contextWindow: 200000, outputTokens: 16384 },
|
|
68
|
+
"claude-3-5-haiku-20241022": { id: "claude-3-5-haiku-20241022", providerID: "anthropic", name: "Claude 3.5 Haiku", contextWindow: 200000, outputTokens: 8192 },
|
|
69
|
+
"gpt-4o": { id: "gpt-4o", providerID: "openai", name: "GPT-4o", contextWindow: 128000, outputTokens: 16384 },
|
|
70
|
+
"gpt-4o-mini": { id: "gpt-4o-mini", providerID: "openai", name: "GPT-4o Mini", contextWindow: 128000, outputTokens: 16384 },
|
|
71
|
+
"o3-mini": { id: "o3-mini", providerID: "openai", name: "o3-mini", contextWindow: 200000, outputTokens: 100000 },
|
|
72
|
+
"gemini-2.5-flash": { id: "gemini-2.5-flash", providerID: "google", name: "Gemini 2.5 Flash", contextWindow: 1048576, outputTokens: 65536 },
|
|
73
|
+
"gemini-2.5-pro": { id: "gemini-2.5-pro", providerID: "google", name: "Gemini 2.5 Pro", contextWindow: 1048576, outputTokens: 65536 },
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export const DEFAULT_MODEL = "claude-sonnet-4-20250514"
|
|
77
|
+
|
|
78
|
+
// ---------------------------------------------------------------------------
|
|
79
|
+
// Get API key for a provider
|
|
80
|
+
// ---------------------------------------------------------------------------
|
|
81
|
+
export async function getApiKey(providerID: string): Promise<string | undefined> {
|
|
82
|
+
const envKeys = PROVIDER_ENV[providerID] || []
|
|
83
|
+
for (const key of envKeys) {
|
|
84
|
+
if (process.env[key]) return process.env[key]
|
|
85
|
+
}
|
|
86
|
+
const cfg = await Config.load()
|
|
87
|
+
return cfg.providers?.[providerID]?.api_key
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// ---------------------------------------------------------------------------
|
|
91
|
+
// Get base URL for a provider
|
|
92
|
+
// ---------------------------------------------------------------------------
|
|
93
|
+
export async function getBaseUrl(providerID: string): Promise<string | undefined> {
|
|
94
|
+
const envKeys = PROVIDER_BASE_URL_ENV[providerID] || []
|
|
95
|
+
for (const key of envKeys) {
|
|
96
|
+
if (process.env[key]) return process.env[key]
|
|
97
|
+
}
|
|
98
|
+
const cfg = await Config.load()
|
|
99
|
+
return cfg.providers?.[providerID]?.base_url
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
// ---------------------------------------------------------------------------
|
|
103
|
+
// List all available providers
|
|
104
|
+
// ---------------------------------------------------------------------------
|
|
105
|
+
export async function listProviders(): Promise<Record<string, { name: string; models: string[]; hasKey: boolean }>> {
|
|
106
|
+
const result: Record<string, { name: string; models: string[]; hasKey: boolean }> = {}
|
|
107
|
+
for (const model of Object.values(BUILTIN_MODELS)) {
|
|
108
|
+
if (!result[model.providerID]) {
|
|
109
|
+
const key = await getApiKey(model.providerID)
|
|
110
|
+
result[model.providerID] = { name: model.providerID, models: [], hasKey: !!key }
|
|
111
|
+
}
|
|
112
|
+
if (!result[model.providerID]!.models.includes(model.id)) {
|
|
113
|
+
result[model.providerID]!.models.push(model.id)
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
const compatKey = await getApiKey("openai-compatible")
|
|
117
|
+
if (compatKey) {
|
|
118
|
+
result["openai-compatible"] = { name: "OpenAI Compatible", models: [], hasKey: true }
|
|
119
|
+
}
|
|
120
|
+
return result
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// ---------------------------------------------------------------------------
|
|
124
|
+
// Get a LanguageModel instance
|
|
125
|
+
// ---------------------------------------------------------------------------
|
|
126
|
+
const sdkCache = new Map<string, SDK>()
|
|
127
|
+
|
|
128
|
+
export async function getModel(modelID?: string): Promise<LanguageModel> {
|
|
129
|
+
const id = modelID || (await getConfiguredModel()) || DEFAULT_MODEL
|
|
130
|
+
|
|
131
|
+
const builtin = BUILTIN_MODELS[id]
|
|
132
|
+
if (builtin) {
|
|
133
|
+
const apiKey = await getApiKey(builtin.providerID)
|
|
134
|
+
if (!apiKey) throw noKeyError(builtin.providerID)
|
|
135
|
+
const base = await getBaseUrl(builtin.providerID)
|
|
136
|
+
return getLanguageModel(builtin.providerID, id, apiKey, undefined, base)
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
if (id.includes("/")) {
|
|
140
|
+
const [providerID, ...rest] = id.split("/")
|
|
141
|
+
const mid = rest.join("/")
|
|
142
|
+
const apiKey = await getApiKey(providerID!)
|
|
143
|
+
if (!apiKey) throw noKeyError(providerID!)
|
|
144
|
+
const base = await getBaseUrl(providerID!)
|
|
145
|
+
return getLanguageModel(providerID!, mid, apiKey, undefined, base)
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const cfg = await Config.load()
|
|
149
|
+
if (cfg.providers) {
|
|
150
|
+
for (const [providerID, p] of Object.entries(cfg.providers)) {
|
|
151
|
+
if (!p.api_key) continue
|
|
152
|
+
const base = p.base_url || (await getBaseUrl(providerID))
|
|
153
|
+
if (base) {
|
|
154
|
+
log.info("fallback: sending unknown model to provider with base_url", { provider: providerID, model: id })
|
|
155
|
+
return getLanguageModel(providerID, id, p.api_key, undefined, base)
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
throw new Error(`Unknown model: ${id}. Run: codeblog config --list`)
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
function getLanguageModel(providerID: string, modelID: string, apiKey: string, npm?: string, baseURL?: string): LanguageModel {
|
|
164
|
+
const pkg = npm || PROVIDER_NPM[providerID] || "@ai-sdk/openai-compatible"
|
|
165
|
+
const cacheKey = `${providerID}:${pkg}:${apiKey.slice(0, 8)}`
|
|
166
|
+
|
|
167
|
+
log.info("loading model", { provider: providerID, model: modelID, pkg })
|
|
168
|
+
|
|
169
|
+
let sdk = sdkCache.get(cacheKey)
|
|
170
|
+
if (!sdk) {
|
|
171
|
+
const createFn = BUNDLED_PROVIDERS[pkg]
|
|
172
|
+
if (!createFn) throw new Error(`No bundled provider for ${pkg}. Use openai-compatible with a base URL instead.`)
|
|
173
|
+
const opts: Record<string, unknown> = { apiKey, name: providerID }
|
|
174
|
+
if (baseURL) {
|
|
175
|
+
const clean = baseURL.replace(/\/+$/, "")
|
|
176
|
+
opts.baseURL = clean.endsWith("/v1") ? clean : `${clean}/v1`
|
|
177
|
+
}
|
|
178
|
+
sdk = createFn(opts)
|
|
179
|
+
sdkCache.set(cacheKey, sdk)
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
if (pkg === "@ai-sdk/openai-compatible" && typeof (sdk as any).chatModel === "function") {
|
|
183
|
+
return (sdk as any).chatModel(modelID)
|
|
184
|
+
}
|
|
185
|
+
if (typeof (sdk as any).languageModel === "function") {
|
|
186
|
+
return (sdk as any).languageModel(modelID)
|
|
187
|
+
}
|
|
188
|
+
return (sdk as any)(modelID)
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
function noKeyError(providerID: string): Error {
|
|
192
|
+
const envKeys = PROVIDER_ENV[providerID] || []
|
|
193
|
+
const envHint = envKeys[0] || `${providerID.toUpperCase().replace(/-/g, "_")}_API_KEY`
|
|
194
|
+
return new Error(`No API key for ${providerID}. Set ${envHint} or run: codeblog config --provider ${providerID} --api-key <key>`)
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
async function getConfiguredModel(): Promise<string | undefined> {
|
|
198
|
+
const cfg = await Config.load()
|
|
199
|
+
return cfg.model
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
// ---------------------------------------------------------------------------
|
|
203
|
+
// Check if any AI provider has a key configured
|
|
204
|
+
// ---------------------------------------------------------------------------
|
|
205
|
+
export async function hasAnyKey(): Promise<boolean> {
|
|
206
|
+
for (const providerID of Object.keys(PROVIDER_ENV)) {
|
|
207
|
+
const key = await getApiKey(providerID)
|
|
208
|
+
if (key) return true
|
|
209
|
+
}
|
|
210
|
+
const cfg = await Config.load()
|
|
211
|
+
if (cfg.providers) {
|
|
212
|
+
for (const p of Object.values(cfg.providers)) {
|
|
213
|
+
if (p.api_key) return true
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
return false
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
// ---------------------------------------------------------------------------
|
|
220
|
+
// List available models with key status
|
|
221
|
+
// ---------------------------------------------------------------------------
|
|
222
|
+
export async function available(): Promise<Array<{ model: ModelInfo; hasKey: boolean }>> {
|
|
223
|
+
const result: Array<{ model: ModelInfo; hasKey: boolean }> = []
|
|
224
|
+
for (const model of Object.values(BUILTIN_MODELS)) {
|
|
225
|
+
const apiKey = await getApiKey(model.providerID)
|
|
226
|
+
result.push({ model, hasKey: !!apiKey })
|
|
227
|
+
}
|
|
228
|
+
return result
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
// ---------------------------------------------------------------------------
|
|
232
|
+
// Parse provider/model format
|
|
233
|
+
// ---------------------------------------------------------------------------
|
|
234
|
+
export function parseModel(model: string) {
|
|
235
|
+
const [providerID, ...rest] = model.split("/")
|
|
236
|
+
return { providerID, modelID: rest.join("/") }
|
|
237
|
+
}
|
|
238
|
+
}
|
package/src/ai/tools.ts
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import { tool, jsonSchema } from "ai"
|
|
2
|
+
import { McpBridge } from "../mcp/client"
|
|
3
|
+
import { Log } from "../util/log"
|
|
4
|
+
|
|
5
|
+
const log = Log.create({ service: "ai-tools" })

// ---------------------------------------------------------------------------
// Tool display labels for the TUI streaming indicator.
// Kept as a static fallback — new tools added to MCP will show their name
// as-is if not listed here, which is acceptable.
// ---------------------------------------------------------------------------
export const TOOL_LABELS: Record<string, string> = {
  scan_sessions: "Scanning IDE sessions...",
  read_session: "Reading session...",
  analyze_session: "Analyzing session...",
  post_to_codeblog: "Publishing post...",
  auto_post: "Auto-posting...",
  weekly_digest: "Generating weekly digest...",
  browse_posts: "Browsing posts...",
  search_posts: "Searching posts...",
  read_post: "Reading post...",
  comment_on_post: "Posting comment...",
  vote_on_post: "Voting...",
  edit_post: "Editing post...",
  delete_post: "Deleting post...",
  bookmark_post: "Bookmarking...",
  browse_by_tag: "Browsing tags...",
  trending_topics: "Loading trending...",
  explore_and_engage: "Exploring posts...",
  join_debate: "Loading debates...",
  my_notifications: "Checking notifications...",
  manage_agents: "Managing agents...",
  my_posts: "Loading your posts...",
  my_dashboard: "Loading dashboard...",
  follow_user: "Processing follow...",
  codeblog_setup: "Configuring CodeBlog...",
  codeblog_status: "Checking status...",
}
|
|
39
|
+
|
|
40
|
+
// ---------------------------------------------------------------------------
// Helper: call MCP tool and return result
// ---------------------------------------------------------------------------
// Thin wrapper so each tool's `execute` callback stays a one-liner; the
// bridge parses the MCP response as JSON.
async function mcp(name: string, args: Record<string, unknown> = {}): Promise<any> {
  return McpBridge.callToolJSON(name, args)
}
|
|
46
|
+
|
|
47
|
+
// Strip undefined/null values from args before sending to MCP
|
|
48
|
+
function clean(obj: Record<string, unknown>): Record<string, unknown> {
|
|
49
|
+
const result: Record<string, unknown> = {}
|
|
50
|
+
for (const [k, v] of Object.entries(obj)) {
|
|
51
|
+
if (v !== undefined && v !== null) result[k] = v
|
|
52
|
+
}
|
|
53
|
+
return result
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// ---------------------------------------------------------------------------
// Dynamic tool discovery from MCP server
// ---------------------------------------------------------------------------
// Module-level cache of the discovered tool set; populated on first success.
let _cached: Record<string, any> | null = null

/**
 * Build AI SDK tools dynamically from the MCP server's listTools() response.
 * Results are cached after the first successful call.
 */
export async function getChatTools(): Promise<Record<string, any>> {
  if (_cached) return _cached

  const { tools: mcpTools } = await McpBridge.listTools()
  log.info("discovered MCP tools", { count: mcpTools.length, names: mcpTools.map((t) => t.name) })

  const tools: Record<string, any> = {}

  for (const t of mcpTools) {
    const name = t.name
    const schema = t.inputSchema as Record<string, unknown>

    // Wrap each MCP tool as an AI SDK tool; execution proxies back through
    // the bridge, with nullish args stripped first.
    tools[name] = (tool as any)({
      description: t.description || name,
      parameters: jsonSchema(schema),
      execute: async (args: any) => mcp(name, clean(args)),
    })
  }

  // Cached only after a successful listTools() round-trip, so a failed
  // discovery is retried on the next call.
  _cached = tools
  return tools
}

/** Clear the cached tools (useful for testing or reconnection). */
export function clearChatToolsCache(): void {
  _cached = null
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import path from "path"
|
|
2
|
+
import { Global } from "../global"
|
|
3
|
+
import z from "zod"
|
|
4
|
+
|
|
5
|
+
export namespace Auth {
|
|
6
|
+
export const Token = z
|
|
7
|
+
.object({
|
|
8
|
+
type: z.enum(["jwt", "apikey"]),
|
|
9
|
+
value: z.string(),
|
|
10
|
+
expires: z.number().optional(),
|
|
11
|
+
username: z.string().optional(),
|
|
12
|
+
})
|
|
13
|
+
.meta({ ref: "AuthToken" })
|
|
14
|
+
export type Token = z.infer<typeof Token>
|
|
15
|
+
|
|
16
|
+
const filepath = path.join(Global.Path.data, "auth.json")
|
|
17
|
+
|
|
18
|
+
export async function get(): Promise<Token | null> {
|
|
19
|
+
const file = Bun.file(filepath)
|
|
20
|
+
const data = await file.json().catch(() => null)
|
|
21
|
+
if (!data) return null
|
|
22
|
+
const parsed = Token.safeParse(data)
|
|
23
|
+
if (!parsed.success) return null
|
|
24
|
+
return parsed.data
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export async function set(token: Token) {
|
|
28
|
+
await Bun.write(Bun.file(filepath, { mode: 0o600 }), JSON.stringify(token, null, 2))
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export async function remove() {
|
|
32
|
+
const fs = await import("fs/promises")
|
|
33
|
+
await fs.unlink(filepath).catch(() => {})
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export async function header(): Promise<Record<string, string>> {
|
|
37
|
+
const token = await get()
|
|
38
|
+
if (!token) return {}
|
|
39
|
+
if (token.type === "apikey") return { Authorization: `Bearer ${token.value}` }
|
|
40
|
+
return { Authorization: `Bearer ${token.value}` }
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export async function authenticated(): Promise<boolean> {
|
|
44
|
+
const token = await get()
|
|
45
|
+
return token !== null
|
|
46
|
+
}
|
|
47
|
+
}
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import { Auth } from "./index"
|
|
2
|
+
import { Config } from "../config"
|
|
3
|
+
import { McpBridge } from "../mcp/client"
|
|
4
|
+
import { Server } from "../server"
|
|
5
|
+
import { Log } from "../util/log"
|
|
6
|
+
|
|
7
|
+
const log = Log.create({ service: "oauth" })
|
|
8
|
+
|
|
9
|
+
export namespace OAuth {
  /**
   * Browser-based login: start a local HTTP callback server, open the
   * CodeBlog auth page pointing back at it, and store the token/API key the
   * page redirects to us. Resolves once the callback request has been
   * served; rejects on handler error or after a 5-minute timeout.
   */
  export async function login(options?: { onUrl?: (url: string) => void }) {
    const open = (await import("open")).default // lazy: only needed during login
    const base = await Config.url()

    const { app, port } = Server.createCallbackServer(async (params) => {
      const token = params.get("token")
      const key = params.get("api_key")
      const username = params.get("username") || undefined

      // An API key takes precedence over a session JWT.
      if (key) {
        await Auth.set({ type: "apikey", value: key, username })
        // Sync API key to MCP config (~/.codeblog/config.json)
        try {
          await McpBridge.callTool("codeblog_setup", { api_key: key })
        } catch (err) {
          // Best-effort: login still succeeds if the MCP sync fails.
          log.warn("failed to sync API key to MCP config", { error: String(err) })
        }
        log.info("authenticated with api key")
      } else if (token) {
        await Auth.set({ type: "jwt", value: token, username })
        log.info("authenticated with jwt")
      } else {
        Server.stop()
        throw new Error("No token received")
      }

      // Delay shutdown so the browser can receive the success page below.
      setTimeout(() => Server.stop(), 500)
      return `<!DOCTYPE html>
<html><head><meta charset="utf-8"><meta name="viewport" content="width=device-width,initial-scale=1">
<title>CodeBlog - Authenticated</title>
<style>
*{margin:0;padding:0;box-sizing:border-box}
body{font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,sans-serif;min-height:100vh;display:flex;align-items:center;justify-content:center;background:#f8f9fa}
.card{text-align:center;background:#fff;border-radius:16px;padding:48px 40px;box-shadow:0 4px 24px rgba(0,0,0,.08);max-width:420px;width:90%}
.icon{font-size:64px;margin-bottom:16px}
h1{font-size:24px;color:#232629;margin-bottom:8px}
p{font-size:15px;color:#6a737c;line-height:1.5}
.brand{color:#f48225;font-weight:700}
.hint{margin-top:24px;font-size:13px;color:#9a9a9a}
</style></head><body>
<div class="card">
<div class="icon">✅</div>
<h1>Welcome to <span class="brand">CodeBlog</span></h1>
<p>Authentication successful! You can close this window and return to the terminal.</p>
<p class="hint">This window will close automatically...</p>
</div>
<script>setTimeout(()=>window.close(),3000)</script>
</body></html>`
    })

    return new Promise<void>((resolve, reject) => {
      // Wrap app.fetch via Proxy so we can observe when the callback request
      // has been served and settle this promise accordingly.
      const original = app.fetch
      const wrapped = new Proxy(app, {
        get(target, prop) {
          if (prop === "fetch") {
            return async (...args: Parameters<typeof original>) => {
              try {
                const res = await original.apply(target, args)
                resolve()
                return res
              } catch (err) {
                reject(err instanceof Error ? err : new Error(String(err)))
                return new Response("Error", { status: 500 })
              }
            }
          }
          return Reflect.get(target, prop)
        },
      })

      Server.start(wrapped, port)

      const authUrl = `${base}/auth/cli?port=${port}`
      log.info("opening browser", { url: authUrl })
      if (options?.onUrl) options.onUrl(authUrl)
      open(authUrl)

      // Timeout after 5 minutes
      // NOTE(review): this timer is never cleared on success — it will still
      // fire later, call Server.stop(), and keep the process alive until it
      // does (unless unref'd). Consider clearTimeout once resolve() runs.
      setTimeout(() => {
        Server.stop()
        reject(new Error("OAuth login timed out"))
      }, 5 * 60 * 1000)
    })
  }
}
|