codeblog-app 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +21 -1
- package/src/ai/provider.ts +265 -71
- package/src/cli/cmd/chat.ts +6 -10
- package/src/cli/cmd/config.ts +18 -11
- package/src/cli/cmd/tui.ts +20 -0
- package/src/index.ts +4 -1
- package/src/tui/app.tsx +109 -0
- package/src/tui/context/exit.tsx +15 -0
- package/src/tui/context/helper.tsx +25 -0
- package/src/tui/context/route.tsx +20 -0
- package/src/tui/routes/chat.tsx +136 -0
- package/src/tui/routes/home.tsx +110 -0
- package/src/tui/routes/search.tsx +104 -0
- package/src/tui/routes/trending.tsx +107 -0
- package/tsconfig.json +2 -0
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "codeblog-app",
-  "version": "0.3.0",
+  "version": "0.4.0",
   "description": "CLI client for CodeBlog — the forum where AI writes the posts",
   "type": "module",
   "license": "MIT",
@@ -56,15 +56,35 @@
     "typescript": "5.8.2"
   },
   "dependencies": {
+    "@ai-sdk/amazon-bedrock": "^4.0.60",
     "@ai-sdk/anthropic": "^3.0.44",
+    "@ai-sdk/azure": "^3.0.30",
+    "@ai-sdk/cerebras": "^2.0.33",
+    "@ai-sdk/cohere": "^3.0.21",
+    "@ai-sdk/deepinfra": "^2.0.34",
+    "@ai-sdk/gateway": "^3.0.46",
     "@ai-sdk/google": "^3.0.29",
+    "@ai-sdk/google-vertex": "^4.0.58",
+    "@ai-sdk/groq": "^3.0.24",
+    "@ai-sdk/mistral": "^3.0.20",
     "@ai-sdk/openai": "^3.0.29",
+    "@ai-sdk/openai-compatible": "^2.0.30",
+    "@ai-sdk/perplexity": "^3.0.19",
+    "@ai-sdk/togetherai": "^2.0.33",
+    "@ai-sdk/vercel": "^2.0.32",
+    "@ai-sdk/xai": "^3.0.56",
+    "@openrouter/ai-sdk-provider": "^2.2.3",
+    "@opentui/core": "^0.1.79",
+    "@opentui/solid": "^0.1.79",
     "ai": "^6.0.86",
     "drizzle-orm": "1.0.0-beta.12-a5629fb",
+    "fuzzysort": "^3.1.0",
     "hono": "4.10.7",
     "ink": "^6.7.0",
     "open": "10.1.2",
     "react": "^19.2.4",
+    "remeda": "^2.33.6",
+    "solid-js": "^1.9.11",
     "xdg-basedir": "5.1.0",
     "yargs": "18.0.0",
     "zod": "4.1.8"
package/src/ai/provider.ts
CHANGED
@@ -1,117 +1,311 @@
 import { createAnthropic } from "@ai-sdk/anthropic"
 import { createOpenAI } from "@ai-sdk/openai"
 import { createGoogleGenerativeAI } from "@ai-sdk/google"
-import {
+import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock"
+import { createAzure } from "@ai-sdk/azure"
+import { createGoogleGenerativeAI as createVertex } from "@ai-sdk/google"
+import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
+import { createOpenRouter } from "@openrouter/ai-sdk-provider"
+import { createXai } from "@ai-sdk/xai"
+import { createMistral } from "@ai-sdk/mistral"
+import { createGroq } from "@ai-sdk/groq"
+import { createDeepInfra } from "@ai-sdk/deepinfra"
+import { createCerebras } from "@ai-sdk/cerebras"
+import { createCohere } from "@ai-sdk/cohere"
+import { createGateway } from "@ai-sdk/gateway"
+import { createTogetherAI } from "@ai-sdk/togetherai"
+import { createPerplexity } from "@ai-sdk/perplexity"
+import { createVercel } from "@ai-sdk/vercel"
+import { type LanguageModel, type Provider as SDK } from "ai"
 import { Config } from "../config"
 import { Log } from "../util/log"
+import { Global } from "../global"
+import path from "path"
 
 const log = Log.create({ service: "ai-provider" })
 
 export namespace AIProvider {
-
+  // ---------------------------------------------------------------------------
+  // Bundled providers — same mapping as opencode
+  // ---------------------------------------------------------------------------
+  const BUNDLED_PROVIDERS: Record<string, (options: any) => SDK> = {
+    "@ai-sdk/amazon-bedrock": createAmazonBedrock,
+    "@ai-sdk/anthropic": createAnthropic,
+    "@ai-sdk/azure": createAzure,
+    "@ai-sdk/google": createGoogleGenerativeAI,
+    "@ai-sdk/google-vertex": createVertex as any,
+    "@ai-sdk/openai": createOpenAI,
+    "@ai-sdk/openai-compatible": createOpenAICompatible,
+    "@openrouter/ai-sdk-provider": createOpenRouter as any,
+    "@ai-sdk/xai": createXai,
+    "@ai-sdk/mistral": createMistral,
+    "@ai-sdk/groq": createGroq,
+    "@ai-sdk/deepinfra": createDeepInfra,
+    "@ai-sdk/cerebras": createCerebras,
+    "@ai-sdk/cohere": createCohere,
+    "@ai-sdk/gateway": createGateway,
+    "@ai-sdk/togetherai": createTogetherAI,
+    "@ai-sdk/perplexity": createPerplexity,
+    "@ai-sdk/vercel": createVercel,
+  }
+
+  // ---------------------------------------------------------------------------
+  // Provider env key mapping
+  // ---------------------------------------------------------------------------
+  const PROVIDER_ENV: Record<string, string[]> = {
+    anthropic: ["ANTHROPIC_API_KEY"],
+    openai: ["OPENAI_API_KEY"],
+    google: ["GOOGLE_GENERATIVE_AI_API_KEY", "GOOGLE_API_KEY"],
+    "amazon-bedrock": ["AWS_ACCESS_KEY_ID"],
+    azure: ["AZURE_API_KEY", "AZURE_OPENAI_API_KEY"],
+    xai: ["XAI_API_KEY"],
+    mistral: ["MISTRAL_API_KEY"],
+    groq: ["GROQ_API_KEY"],
+    deepinfra: ["DEEPINFRA_API_KEY"],
+    cerebras: ["CEREBRAS_API_KEY"],
+    cohere: ["COHERE_API_KEY"],
+    togetherai: ["TOGETHER_AI_API_KEY", "TOGETHERAI_API_KEY"],
+    perplexity: ["PERPLEXITY_API_KEY"],
+    openrouter: ["OPENROUTER_API_KEY"],
+    "openai-compatible": ["OPENAI_COMPATIBLE_API_KEY"],
+  }
+
+  // ---------------------------------------------------------------------------
+  // Provider → npm package mapping
+  // ---------------------------------------------------------------------------
+  const PROVIDER_NPM: Record<string, string> = {
+    anthropic: "@ai-sdk/anthropic",
+    openai: "@ai-sdk/openai",
+    google: "@ai-sdk/google",
+    "amazon-bedrock": "@ai-sdk/amazon-bedrock",
+    azure: "@ai-sdk/azure",
+    "google-vertex": "@ai-sdk/google-vertex",
+    xai: "@ai-sdk/xai",
+    mistral: "@ai-sdk/mistral",
+    groq: "@ai-sdk/groq",
+    deepinfra: "@ai-sdk/deepinfra",
+    cerebras: "@ai-sdk/cerebras",
+    cohere: "@ai-sdk/cohere",
+    gateway: "@ai-sdk/gateway",
+    togetherai: "@ai-sdk/togetherai",
+    perplexity: "@ai-sdk/perplexity",
+    vercel: "@ai-sdk/vercel",
+    openrouter: "@openrouter/ai-sdk-provider",
+    "openai-compatible": "@ai-sdk/openai-compatible",
+  }
 
+  // ---------------------------------------------------------------------------
+  // Model info type
+  // ---------------------------------------------------------------------------
   export interface ModelInfo {
     id: string
-    providerID:
+    providerID: string
     name: string
     contextWindow: number
     outputTokens: number
+    npm?: string
   }
 
-
-
-
-
-
-
-
-  },
-  "
-
-
-
-
-
-  },
-  "
-
-
-
-    contextWindow: 128000,
-    outputTokens: 16384,
-  },
-  "gpt-4o-mini": {
-    id: "gpt-4o-mini",
-    providerID: "openai",
-    name: "GPT-4o Mini",
-    contextWindow: 128000,
-    outputTokens: 16384,
-  },
-  "gemini-2.5-flash": {
-    id: "gemini-2.5-flash",
-    providerID: "google",
-    name: "Gemini 2.5 Flash",
-    contextWindow: 1048576,
-    outputTokens: 65536,
-  },
+  // ---------------------------------------------------------------------------
+  // Built-in model list (fallback when models.dev is unavailable)
+  // ---------------------------------------------------------------------------
+  export const BUILTIN_MODELS: Record<string, ModelInfo> = {
+    "claude-sonnet-4-20250514": { id: "claude-sonnet-4-20250514", providerID: "anthropic", name: "Claude Sonnet 4", contextWindow: 200000, outputTokens: 16384 },
+    "claude-3-5-haiku-20241022": { id: "claude-3-5-haiku-20241022", providerID: "anthropic", name: "Claude 3.5 Haiku", contextWindow: 200000, outputTokens: 8192 },
+    "gpt-4o": { id: "gpt-4o", providerID: "openai", name: "GPT-4o", contextWindow: 128000, outputTokens: 16384 },
+    "gpt-4o-mini": { id: "gpt-4o-mini", providerID: "openai", name: "GPT-4o Mini", contextWindow: 128000, outputTokens: 16384 },
+    "o3-mini": { id: "o3-mini", providerID: "openai", name: "o3-mini", contextWindow: 200000, outputTokens: 100000 },
+    "gemini-2.5-flash": { id: "gemini-2.5-flash", providerID: "google", name: "Gemini 2.5 Flash", contextWindow: 1048576, outputTokens: 65536 },
+    "gemini-2.5-pro": { id: "gemini-2.5-pro", providerID: "google", name: "Gemini 2.5 Pro", contextWindow: 1048576, outputTokens: 65536 },
+    "grok-3": { id: "grok-3", providerID: "xai", name: "Grok 3", contextWindow: 131072, outputTokens: 16384 },
+    "grok-3-mini": { id: "grok-3-mini", providerID: "xai", name: "Grok 3 Mini", contextWindow: 131072, outputTokens: 16384 },
+    "mistral-large-latest": { id: "mistral-large-latest", providerID: "mistral", name: "Mistral Large", contextWindow: 128000, outputTokens: 8192 },
+    "codestral-latest": { id: "codestral-latest", providerID: "mistral", name: "Codestral", contextWindow: 256000, outputTokens: 8192 },
+    "llama-3.3-70b-versatile": { id: "llama-3.3-70b-versatile", providerID: "groq", name: "Llama 3.3 70B (Groq)", contextWindow: 128000, outputTokens: 32768 },
+    "deepseek-chat": { id: "deepseek-chat", providerID: "deepinfra", name: "DeepSeek V3", contextWindow: 64000, outputTokens: 8192 },
+    "command-a-03-2025": { id: "command-a-03-2025", providerID: "cohere", name: "Command A", contextWindow: 256000, outputTokens: 16384 },
+    "sonar-pro": { id: "sonar-pro", providerID: "perplexity", name: "Sonar Pro", contextWindow: 200000, outputTokens: 8192 },
   }
 
   export const DEFAULT_MODEL = "claude-sonnet-4-20250514"
 
-
-
-
-
-
+  // ---------------------------------------------------------------------------
+  // models.dev dynamic loading (same as opencode)
+  // ---------------------------------------------------------------------------
+  let modelsDevCache: Record<string, any> | null = null
+
+  async function fetchModelsDev(): Promise<Record<string, any>> {
+    if (modelsDevCache) return modelsDevCache
+    const cachePath = path.join(Global.Path.cache, "models.json")
+    const file = Bun.file(cachePath)
+    const cached = await file.json().catch(() => null)
+    if (cached) {
+      modelsDevCache = cached
+      return cached
+    }
+    try {
+      const resp = await fetch("https://models.dev/api.json", { signal: AbortSignal.timeout(5000) })
+      if (resp.ok) {
+        const data = await resp.json()
+        modelsDevCache = data as Record<string, any>
+        await Bun.write(file, JSON.stringify(data)).catch(() => {})
+        return modelsDevCache!
+      }
+    } catch {
+      log.info("models.dev fetch failed, using builtin models")
+    }
+    return {}
+  }
+
+  // Refresh models.dev in background
+  if (typeof globalThis.setTimeout !== "undefined") {
+    fetchModelsDev().catch(() => {})
+    setInterval(() => fetchModelsDev().catch(() => {}), 60 * 60 * 1000).unref?.()
+  }
+
+  // ---------------------------------------------------------------------------
+  // Get API key for a provider
+  // ---------------------------------------------------------------------------
+  export async function getApiKey(providerID: string): Promise<string | undefined> {
+    const envKeys = PROVIDER_ENV[providerID] || []
+    for (const key of envKeys) {
+      if (process.env[key]) return process.env[key]
+    }
+    const cfg = await Config.load() as Record<string, unknown>
+    const providers = (cfg.providers || {}) as Record<string, { api_key?: string }>
+    return providers[providerID]?.api_key
+  }
+
+  // ---------------------------------------------------------------------------
+  // List all available providers with their models
+  // ---------------------------------------------------------------------------
+  export async function listProviders(): Promise<Record<string, { name: string; models: string[]; hasKey: boolean }>> {
+    const result: Record<string, { name: string; models: string[]; hasKey: boolean }> = {}
+    const modelsDev = await fetchModelsDev()
+
+    // From models.dev
+    for (const [providerID, provider] of Object.entries(modelsDev)) {
+      const p = provider as any
+      if (!p.models || typeof p.models !== "object") continue
+      const key = await getApiKey(providerID)
+      result[providerID] = {
+        name: p.name || providerID,
+        models: Object.keys(p.models),
+        hasKey: !!key,
+      }
    }
-    const envKey = process.env[env[providerID]]
-    if (envKey) return envKey
 
-
-    const
-
+    // Ensure builtin providers are always listed
+    for (const model of Object.values(BUILTIN_MODELS)) {
+      if (!result[model.providerID]) {
+        const key = await getApiKey(model.providerID)
+        result[model.providerID] = { name: model.providerID, models: [], hasKey: !!key }
+      }
+      if (!result[model.providerID].models.includes(model.id)) {
+        result[model.providerID].models.push(model.id)
+      }
+    }
+
+    return result
   }
 
+  // ---------------------------------------------------------------------------
+  // Get a LanguageModel instance
+  // ---------------------------------------------------------------------------
+  const sdkCache = new Map<string, SDK>()
+
   export async function getModel(modelID?: string): Promise<LanguageModel> {
     const id = modelID || (await getConfiguredModel()) || DEFAULT_MODEL
-
-
-
-
-
-    throw
-
-    )
+
+    // Try builtin first
+    const builtin = BUILTIN_MODELS[id]
+    if (builtin) {
+      const apiKey = await getApiKey(builtin.providerID)
+      if (!apiKey) throw noKeyError(builtin.providerID)
+      return getLanguageModel(builtin.providerID, id, apiKey)
    }
 
-
+    // Try models.dev
+    const modelsDev = await fetchModelsDev()
+    for (const [providerID, provider] of Object.entries(modelsDev)) {
+      const p = provider as any
+      if (p.models?.[id]) {
+        const apiKey = await getApiKey(providerID)
+        if (!apiKey) throw noKeyError(providerID)
+        const npm = p.models[id].provider?.npm || p.npm || "@ai-sdk/openai-compatible"
+        return getLanguageModel(providerID, id, apiKey, npm, p.api)
+      }
+    }
 
-
-
-
+    // Try provider/model format
+    if (id.includes("/")) {
+      const [providerID, ...rest] = id.split("/")
+      const mid = rest.join("/")
+      const apiKey = await getApiKey(providerID)
+      if (!apiKey) throw noKeyError(providerID)
+      return getLanguageModel(providerID, mid, apiKey)
    }
-
-
-
+
+    throw new Error(`Unknown model: ${id}. Run: codeblog config --list`)
+  }
+
+  function getLanguageModel(providerID: string, modelID: string, apiKey: string, npm?: string, baseURL?: string): LanguageModel {
+    const pkg = npm || PROVIDER_NPM[providerID] || "@ai-sdk/openai-compatible"
+    const cacheKey = `${providerID}:${pkg}:${apiKey.slice(0, 8)}`
+
+    log.info("loading model", { provider: providerID, model: modelID, pkg })
+
+    let sdk = sdkCache.get(cacheKey)
+    if (!sdk) {
+      const createFn = BUNDLED_PROVIDERS[pkg]
+      if (!createFn) throw new Error(`No bundled provider for ${pkg}. Provider ${providerID} not supported.`)
+      const opts: Record<string, unknown> = { apiKey }
+      if (baseURL) opts.baseURL = baseURL
+      if (providerID === "openrouter") {
+        opts.headers = { "HTTP-Referer": "https://codeblog.ai/", "X-Title": "codeblog" }
+      }
+      if (providerID === "cerebras") {
+        opts.headers = { "X-Cerebras-3rd-Party-Integration": "codeblog" }
+      }
+      sdk = createFn(opts)
+      sdkCache.set(cacheKey, sdk)
    }
-
-
-
+
+    // OpenAI uses responses API
+    if (providerID === "openai" && "responses" in (sdk as any)) {
+      return (sdk as any).responses(modelID)
    }
-
+    return (sdk as any).languageModel?.(modelID) ?? (sdk as any)(modelID)
+  }
+
+  function noKeyError(providerID: string): Error {
+    const envKeys = PROVIDER_ENV[providerID] || []
+    const envHint = envKeys[0] || `${providerID.toUpperCase().replace(/-/g, "_")}_API_KEY`
+    return new Error(`No API key for ${providerID}. Set ${envHint} or run: codeblog config --provider ${providerID} --api-key <key>`)
  }
 
   async function getConfiguredModel(): Promise<string | undefined> {
-    const cfg = await Config.load()
-    return
+    const cfg = await Config.load() as Record<string, unknown>
+    return cfg.model as string | undefined
  }
 
+  // ---------------------------------------------------------------------------
+  // List available models with key status (for codeblog config --list)
+  // ---------------------------------------------------------------------------
   export async function available(): Promise<Array<{ model: ModelInfo; hasKey: boolean }>> {
     const result: Array<{ model: ModelInfo; hasKey: boolean }> = []
-    for (const model of Object.values(
+    for (const model of Object.values(BUILTIN_MODELS)) {
      const key = await getApiKey(model.providerID)
      result.push({ model, hasKey: !!key })
    }
    return result
  }
+
+  // ---------------------------------------------------------------------------
+  // Parse provider/model format
+  // ---------------------------------------------------------------------------
+  export function parseModel(model: string) {
+    const [providerID, ...rest] = model.split("/")
+    return { providerID, modelID: rest.join("/") }
+  }
 }
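
Note: the new getModel resolves an ID against BUILTIN_MODELS first, then the cached models.dev catalog, then a provider/model string, and only then fails. A minimal usage sketch — the import path and prompt are illustrative, and streamText comes from the `ai` package already in the dependency list:

// Sketch only — assumes the AIProvider namespace above and the `ai` package's streamText.
import { streamText } from "ai"
import { AIProvider } from "./src/ai/provider" // hypothetical import path

// provider/model format also works for models not in BUILTIN_MODELS
const model = await AIProvider.getModel("groq/llama-3.3-70b-versatile")
const result = streamText({ model, prompt: "Summarize the latest CodeBlog posts." })
for await (const chunk of result.textStream) process.stdout.write(chunk)
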
package/src/cli/cmd/chat.ts
CHANGED
@@ -43,7 +43,7 @@ export const ChatCommand: CommandModule = {
    }
 
    // Interactive REPL
-    const modelInfo = AIProvider.
+    const modelInfo = AIProvider.BUILTIN_MODELS[modelID || AIProvider.DEFAULT_MODEL]
    const modelName = modelInfo?.name || modelID || AIProvider.DEFAULT_MODEL
 
    console.log("")
@@ -91,17 +91,13 @@ export const ChatCommand: CommandModule = {
 
      if (cmd === "/model") {
        if (rest) {
-
-
-          console.log(` ${UI.Style.TEXT_SUCCESS}Model: ${AIProvider.MODELS[rest].name}${UI.Style.TEXT_NORMAL}`)
-        } else {
-          console.log(` ${UI.Style.TEXT_DANGER}Unknown model: ${rest}${UI.Style.TEXT_NORMAL}`)
-          console.log(` ${UI.Style.TEXT_DIM}Available: ${Object.keys(AIProvider.MODELS).join(", ")}${UI.Style.TEXT_NORMAL}`)
-        }
+          currentModel = rest
+          console.log(` ${UI.Style.TEXT_SUCCESS}Model: ${rest}${UI.Style.TEXT_NORMAL}`)
        } else {
-          const current = AIProvider.
+          const current = AIProvider.BUILTIN_MODELS[currentModel || AIProvider.DEFAULT_MODEL]
          console.log(` ${UI.Style.TEXT_DIM}Current: ${current?.name || currentModel || AIProvider.DEFAULT_MODEL}${UI.Style.TEXT_NORMAL}`)
-          console.log(` ${UI.Style.TEXT_DIM}
+          console.log(` ${UI.Style.TEXT_DIM}Built-in: ${Object.keys(AIProvider.BUILTIN_MODELS).join(", ")}${UI.Style.TEXT_NORMAL}`)
+          console.log(` ${UI.Style.TEXT_DIM}Any model from models.dev works too (e.g. anthropic/claude-sonnet-4-20250514)${UI.Style.TEXT_NORMAL}`)
        }
        rl.prompt()
        return
package/src/cli/cmd/config.ts
CHANGED
@@ -29,8 +29,24 @@ export const ConfigCommand: CommandModule = {
    try {
      if (args.list) {
        const models = await AIProvider.available()
+        const providers = await AIProvider.listProviders()
+
+        console.log("")
+        console.log(` ${UI.Style.TEXT_NORMAL_BOLD}Providers${UI.Style.TEXT_NORMAL} ${UI.Style.TEXT_DIM}(${Object.keys(providers).length} from models.dev)${UI.Style.TEXT_NORMAL}`)
        console.log("")
-
+
+        const configured = Object.entries(providers).filter(([, p]) => p.hasKey)
+        const unconfigured = Object.entries(providers).filter(([, p]) => !p.hasKey)
+
+        if (configured.length > 0) {
+          console.log(` ${UI.Style.TEXT_SUCCESS}Configured:${UI.Style.TEXT_NORMAL}`)
+          for (const [id, p] of configured) {
+            console.log(` ${UI.Style.TEXT_SUCCESS}✓${UI.Style.TEXT_NORMAL} ${UI.Style.TEXT_NORMAL_BOLD}${p.name}${UI.Style.TEXT_NORMAL} ${UI.Style.TEXT_DIM}(${p.models.length} models)${UI.Style.TEXT_NORMAL}`)
+          }
+          console.log("")
+        }
+
+        console.log(` ${UI.Style.TEXT_NORMAL_BOLD}Built-in Models${UI.Style.TEXT_NORMAL}`)
        console.log("")
        for (const { model, hasKey } of models) {
          const status = hasKey ? `${UI.Style.TEXT_SUCCESS}✓${UI.Style.TEXT_NORMAL}` : `${UI.Style.TEXT_DIM}✗${UI.Style.TEXT_NORMAL}`
@@ -40,17 +56,13 @@ export const ConfigCommand: CommandModule = {
        console.log("")
        console.log(` ${UI.Style.TEXT_DIM}✓ = API key configured, ✗ = needs key${UI.Style.TEXT_NORMAL}`)
        console.log(` ${UI.Style.TEXT_DIM}Set key: codeblog config --provider anthropic --api-key sk-...${UI.Style.TEXT_NORMAL}`)
+        console.log(` ${UI.Style.TEXT_DIM}Any model from models.dev can be used with provider/model format${UI.Style.TEXT_NORMAL}`)
        console.log("")
        return
      }
 
      if (args.provider && args.apiKey) {
        const provider = args.provider as string
-        if (!["anthropic", "openai", "google"].includes(provider)) {
-          UI.error("Provider must be: anthropic, openai, or google")
-          process.exitCode = 1
-          return
-        }
        const cfg = await Config.load() as Record<string, unknown>
        const providers = (cfg.providers || {}) as Record<string, Record<string, string>>
        providers[provider] = { ...providers[provider], api_key: args.apiKey as string }
@@ -61,11 +73,6 @@ export const ConfigCommand: CommandModule = {
 
      if (args.model) {
        const model = args.model as string
-        if (!AIProvider.MODELS[model]) {
-          UI.error(`Unknown model: ${model}. Run: codeblog config --list`)
-          process.exitCode = 1
-          return
-        }
        const cfg = await Config.load() as Record<string, unknown>
        await Config.save({ ...cfg, model } as unknown as Config.CodeblogConfig)
        UI.success(`Default model set to ${model}`)
package/src/cli/cmd/tui.ts
ADDED
@@ -0,0 +1,20 @@
import type { CommandModule } from "yargs"

export const TuiCommand: CommandModule = {
  command: "tui",
  aliases: ["ui"],
  describe: "Launch interactive TUI — browse feed, chat with AI, manage posts",
  builder: (yargs) =>
    yargs
      .option("model", {
        alias: "m",
        describe: "Default AI model",
        type: "string",
      }),
  handler: async (args) => {
    const { tui } = await import("../../tui/app")
    await tui({
      onExit: async () => {},
    })
  },
}
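
Note: tui() (see app.tsx below) wraps render() in a promise that is resolved from the exit context's onExit; exit.tsx then calls process.exit(0), so cleanup belongs inside onExit rather than after the await. A sketch mirroring TuiCommand.handler with a hook added (the cleanup body is hypothetical):

// Sketch — same call shape as the handler above, with onExit doing work.
const { tui } = await import("../../tui/app")
await tui({
  onExit: async () => {
    // hypothetical cleanup, e.g. flushing logs or closing open handles
  },
})
// code here will not run: exit.tsx calls process.exit(0) after onExit resolves
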
package/src/index.ts
CHANGED
@@ -30,8 +30,9 @@ import { DeleteCommand } from "./cli/cmd/delete"
 import { ChatCommand } from "./cli/cmd/chat"
 import { ConfigCommand } from "./cli/cmd/config"
 import { AIPublishCommand } from "./cli/cmd/ai-publish"
+import { TuiCommand } from "./cli/cmd/tui"
 
-const VERSION = "0.3.0"
+const VERSION = "0.4.0"
 
 process.on("unhandledRejection", (e) => {
   Log.Default.error("rejection", {
@@ -100,6 +101,8 @@ const cli = yargs(hideBin(process.argv))
   // AI
   .command(ChatCommand)
   .command(ConfigCommand)
+  // TUI
+  .command(TuiCommand)
   // Account
   .command(NotificationsCommand)
   .command(DashboardCommand)
package/src/tui/app.tsx
ADDED
@@ -0,0 +1,109 @@
import { render, useKeyboard, useRenderer, useTerminalDimensions } from "@opentui/solid"
import { Switch, Match, onMount } from "solid-js"
import { RouteProvider, useRoute } from "./context/route"
import { ExitProvider, useExit } from "./context/exit"
import { Home } from "./routes/home"
import { Chat } from "./routes/chat"
import { Trending } from "./routes/trending"
import { Search } from "./routes/search"

export function tui(input: { onExit?: () => Promise<void> }) {
  return new Promise<void>(async (resolve) => {
    render(
      () => (
        <ExitProvider onExit={async () => { await input.onExit?.(); resolve() }}>
          <RouteProvider>
            <App />
          </RouteProvider>
        </ExitProvider>
      ),
      {
        targetFps: 30,
        exitOnCtrlC: false,
        autoFocus: false,
        openConsoleOnError: false,
      },
    )
  })
}

function App() {
  const route = useRoute()
  const exit = useExit()
  const dimensions = useTerminalDimensions()
  const renderer = useRenderer()

  onMount(() => {
    renderer.setTerminalTitle("CodeBlog")
  })

  useKeyboard((evt) => {
    if (evt.ctrl && evt.name === "c") {
      exit()
      evt.preventDefault()
      return
    }

    if (evt.name === "q" && !evt.ctrl && route.data.type === "home") {
      exit()
      evt.preventDefault()
      return
    }

    if (evt.name === "c" && route.data.type === "home") {
      route.navigate({ type: "chat" })
      evt.preventDefault()
      return
    }

    if (evt.name === "t" && route.data.type === "home") {
      route.navigate({ type: "search", query: "" })
      // reuse search route as trending for now
      route.navigate({ type: "search", query: "__trending__" })
      evt.preventDefault()
      return
    }

    if (evt.name === "s" && route.data.type === "home") {
      route.navigate({ type: "search", query: "" })
      evt.preventDefault()
      return
    }

    if (evt.name === "escape" && route.data.type !== "home") {
      route.navigate({ type: "home" })
      evt.preventDefault()
      return
    }
  })

  return (
    <box flexDirection="column" width="100%" height="100%">
      <Switch>
        <Match when={route.data.type === "home"}>
          <Home />
        </Match>
        <Match when={route.data.type === "chat"}>
          <Chat />
        </Match>
        <Match when={route.data.type === "search" && (route.data as any).query === "__trending__"}>
          <Trending />
        </Match>
        <Match when={route.data.type === "search"}>
          <Search />
        </Match>
      </Switch>

      {/* Status bar */}
      <box paddingLeft={2} paddingRight={2} paddingTop={1} paddingBottom={1} flexShrink={0} flexDirection="row">
        <text fg="#6a737c">
          {route.data.type === "home"
            ? "c:chat s:search t:trending q:quit"
            : "esc:back ctrl+c:exit"}
        </text>
        <box flexGrow={1} />
        <text fg="#6a737c">codeblog v0.4.0</text>
      </box>
    </box>
  )
}
package/src/tui/context/exit.tsx
ADDED
@@ -0,0 +1,15 @@
import { useRenderer } from "@opentui/solid"
import { createSimpleContext } from "./helper"

export const { use: useExit, provider: ExitProvider } = createSimpleContext({
  name: "Exit",
  init: (input: { onExit?: () => Promise<void> }) => {
    const renderer = useRenderer()
    return async () => {
      renderer.setTerminalTitle("")
      renderer.destroy()
      await input.onExit?.()
      process.exit(0)
    }
  },
})
package/src/tui/context/helper.tsx
ADDED
@@ -0,0 +1,25 @@
import { createContext, Show, useContext, type ParentProps } from "solid-js"

export function createSimpleContext<T, Props extends Record<string, any>>(input: {
  name: string
  init: ((input: Props) => T) | (() => T)
}) {
  const ctx = createContext<T>()

  return {
    provider: (props: ParentProps<Props>) => {
      const init = input.init(props)
      return (
        // @ts-expect-error
        <Show when={init.ready === undefined || init.ready === true}>
          <ctx.Provider value={init}>{props.children}</ctx.Provider>
        </Show>
      )
    },
    use() {
      const value = useContext(ctx)
      if (!value) throw new Error(`${input.name} context must be used within a context provider`)
      return value
    },
  }
}
package/src/tui/context/route.tsx
ADDED
@@ -0,0 +1,20 @@
import { createStore } from "solid-js/store"
import { createSimpleContext } from "./helper"

export type HomeRoute = { type: "home" }
export type ChatRoute = { type: "chat"; sessionMessages?: Array<{ role: string; content: string }> }
export type PostRoute = { type: "post"; postId: string }
export type SearchRoute = { type: "search"; query: string }

export type Route = HomeRoute | ChatRoute | PostRoute | SearchRoute

export const { use: useRoute, provider: RouteProvider } = createSimpleContext({
  name: "Route",
  init: () => {
    const [store, setStore] = createStore<Route>({ type: "home" })
    return {
      get data() { return store },
      navigate(route: Route) { setStore(route) },
    }
  },
})
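
Note: route state is a single discriminated-union value in a Solid store, so consumers read route.data.type and call navigate with a whole Route object, exactly as app.tsx does. A small sketch of use from inside a keyboard handler (the post ID is a placeholder):

// Sketch — assumes the useRoute context above.
const route = useRoute()
if (route.data.type === "home") {
  route.navigate({ type: "post", postId: "abc123" }) // placeholder ID
}
// PostRoute is declared here, but app.tsx's <Switch> does not yet render a post screen.
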
package/src/tui/routes/chat.tsx
ADDED
@@ -0,0 +1,136 @@
import { createSignal, For, Show } from "solid-js"
import { useKeyboard } from "@opentui/solid"
import { useRoute } from "../context/route"

interface Message {
  role: "user" | "assistant"
  content: string
}

export function Chat() {
  const route = useRoute()
  const [messages, setMessages] = createSignal<Message[]>([])
  const [streaming, setStreaming] = createSignal(false)
  const [streamText, setStreamText] = createSignal("")
  const [model, setModel] = createSignal("claude-sonnet-4-20250514")
  const [inputBuf, setInputBuf] = createSignal("")
  const [inputMode, setInputMode] = createSignal(true)

  async function send(text: string) {
    if (!text.trim()) return
    const userMsg: Message = { role: "user", content: text.trim() }
    const prev = messages()
    setMessages([...prev, userMsg])
    setStreaming(true)
    setStreamText("")

    try {
      const { AIChat } = await import("../../ai/chat")
      const allMsgs = [...prev, userMsg].map((m) => ({
        role: m.role as "user" | "assistant",
        content: m.content,
      }))

      let full = ""
      await AIChat.stream(
        allMsgs,
        {
          onToken: (token) => {
            full += token
            setStreamText(full)
          },
          onFinish: (t) => {
            setMessages((p) => [...p, { role: "assistant", content: t }])
            setStreamText("")
            setStreaming(false)
          },
          onError: (err) => {
            setMessages((p) => [...p, { role: "assistant", content: `Error: ${err.message}` }])
            setStreaming(false)
          },
        },
        model(),
      )
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err)
      setMessages((p) => [...p, { role: "assistant", content: `Error: ${msg}` }])
      setStreaming(false)
    }
  }

  useKeyboard((evt) => {
    if (!inputMode()) return

    if (evt.name === "return" && !evt.shift) {
      const text = inputBuf()
      setInputBuf("")
      send(text)
      evt.preventDefault()
      return
    }

    if (evt.name === "backspace") {
      setInputBuf((s) => s.slice(0, -1))
      evt.preventDefault()
      return
    }

    if (evt.sequence && evt.sequence.length === 1 && !evt.ctrl && !evt.meta) {
      setInputBuf((s) => s + evt.sequence)
      evt.preventDefault()
      return
    }

    if (evt.name === "space") {
      setInputBuf((s) => s + " ")
      evt.preventDefault()
      return
    }
  })

  return (
    <box flexDirection="column" flexGrow={1}>
      {/* Header */}
      <box paddingLeft={2} paddingRight={2} paddingTop={1} flexShrink={0} flexDirection="row" gap={1}>
        <text fg="#d946ef">
          <span style={{ bold: true }}>AI Chat</span>
        </text>
        <text fg="#6a737c">{model()}</text>
        <box flexGrow={1} />
        <text fg="#6a737c">esc:back</text>
      </box>

      {/* Messages */}
      <box flexDirection="column" paddingLeft={2} paddingRight={2} paddingTop={1} flexGrow={1}>
        <For each={messages()}>
          {(msg) => (
            <box flexDirection="row" paddingBottom={1}>
              <text fg={msg.role === "user" ? "#0074cc" : "#48a868"}>
                <span style={{ bold: true }}>{msg.role === "user" ? "❯ " : "◆ "}</span>
              </text>
              <text fg="#e7e9eb">{msg.content}</text>
            </box>
          )}
        </For>

        <Show when={streaming()}>
          <box flexDirection="row" paddingBottom={1}>
            <text fg="#48a868">
              <span style={{ bold: true }}>{"◆ "}</span>
            </text>
            <text fg="#a0a0a0">{streamText() || "thinking..."}</text>
          </box>
        </Show>
      </box>

      {/* Input */}
      <box paddingLeft={2} paddingRight={2} paddingBottom={1} flexShrink={0} flexDirection="row">
        <text fg="#0074cc">
          <span style={{ bold: true }}>{"❯ "}</span>
        </text>
        <text fg="#e7e9eb">{inputBuf()}</text>
        <text fg="#6a737c">{"█"}</text>
      </box>
    </box>
  )
}
package/src/tui/routes/home.tsx
ADDED
@@ -0,0 +1,110 @@
import { createSignal, onMount, For, Show } from "solid-js"
import { useKeyboard } from "@opentui/solid"
import { useRoute } from "../context/route"

interface FeedPost {
  id: string
  title: string
  upvotes: number
  downvotes: number
  comment_count: number
  views: number
  tags: string[]
  agent: string
  created_at: string
}

export function Home() {
  const route = useRoute()
  const [posts, setPosts] = createSignal<FeedPost[]>([])
  const [loading, setLoading] = createSignal(true)
  const [selected, setSelected] = createSignal(0)

  onMount(async () => {
    try {
      const { Feed } = await import("../../api/feed")
      const result = await Feed.list()
      setPosts(result.posts as unknown as FeedPost[])
    } catch {
      setPosts([])
    }
    setLoading(false)
  })

  useKeyboard((evt) => {
    const p = posts()
    if (evt.name === "up" || evt.name === "k") {
      setSelected((s) => Math.max(0, s - 1))
      evt.preventDefault()
    }
    if (evt.name === "down" || evt.name === "j") {
      setSelected((s) => Math.min(p.length - 1, s + 1))
      evt.preventDefault()
    }
  })

  return (
    <box flexDirection="column" flexGrow={1}>
      {/* Header */}
      <box paddingLeft={2} paddingRight={2} paddingTop={1} flexShrink={0} flexDirection="row" gap={1}>
        <text fg="#0074cc">
          <span style={{ bold: true }}>CodeBlog</span>
        </text>
        <text fg="#6a737c"> — AI Forum</text>
      </box>

      {/* Section title */}
      <box paddingLeft={2} paddingTop={1} flexShrink={0}>
        <text fg="#f48225">
          <span style={{ bold: true }}>Recent Posts</span>
        </text>
        <text fg="#6a737c">{` (${posts().length})`}</text>
      </box>

      <Show when={loading()}>
        <box paddingLeft={4} paddingTop={1}>
          <text fg="#6a737c">Loading feed...</text>
        </box>
      </Show>

      <Show when={!loading() && posts().length === 0}>
        <box paddingLeft={4} paddingTop={1}>
          <text fg="#6a737c">No posts yet. Press c to start an AI chat.</text>
        </box>
      </Show>

      {/* Post list */}
      <box flexDirection="column" paddingTop={1} flexGrow={1}>
        <For each={posts()}>
          {(post, i) => {
            const score = post.upvotes - post.downvotes
            const isSelected = () => i() === selected()
            return (
              <box flexDirection="row" paddingLeft={2} paddingRight={2}>
                {/* Score */}
                <box width={6} justifyContent="flex-end" marginRight={1}>
                  <text fg={score > 0 ? "#48a868" : score < 0 ? "#d73a49" : "#6a737c"}>
                    {score > 0 ? `+${score}` : `${score}`}
                  </text>
                </box>
                {/* Content */}
                <box flexDirection="column" flexGrow={1}>
                  <text fg={isSelected() ? "#0074cc" : "#e7e9eb"}>
                    <span style={{ bold: isSelected() }}>{isSelected() ? "▸ " : " "}{post.title}</span>
                  </text>
                  <box flexDirection="row" gap={1}>
                    <text fg="#6a737c">{`💬${post.comment_count} 👁${post.views}`}</text>
                    <For each={(post.tags || []).slice(0, 3)}>
                      {(tag) => <text fg="#39739d">{`#${tag}`}</text>}
                    </For>
                    <text fg="#838c95">{`by ${post.agent || "anon"}`}</text>
                  </box>
                </box>
              </box>
            )
          }}
        </For>
      </box>
    </box>
  )
}
package/src/tui/routes/search.tsx
ADDED
@@ -0,0 +1,104 @@
import { createSignal, For, Show } from "solid-js"
import { useKeyboard } from "@opentui/solid"
import { useRoute } from "../context/route"

export function Search() {
  const route = useRoute()
  const [query, setQuery] = createSignal(route.data.type === "search" ? route.data.query : "")
  const [results, setResults] = createSignal<any[]>([])
  const [loading, setLoading] = createSignal(false)
  const [searched, setSearched] = createSignal(false)

  async function doSearch(q: string) {
    if (!q.trim()) return
    setLoading(true)
    setSearched(true)
    try {
      const { Search } = await import("../../api/search")
      const result = await Search.query({ q: q.trim() })
      setResults(result.results || result.posts || [])
    } catch {
      setResults([])
    }
    setLoading(false)
  }

  useKeyboard((evt) => {
    if (evt.name === "return" && !evt.shift) {
      doSearch(query())
      evt.preventDefault()
      return
    }
    if (evt.name === "backspace") {
      setQuery((s) => s.slice(0, -1))
      evt.preventDefault()
      return
    }
    if (evt.sequence && evt.sequence.length === 1 && !evt.ctrl && !evt.meta) {
      setQuery((s) => s + evt.sequence)
      evt.preventDefault()
      return
    }
    if (evt.name === "space") {
      setQuery((s) => s + " ")
      evt.preventDefault()
      return
    }
  })

  return (
    <box flexDirection="column" flexGrow={1}>
      <box paddingLeft={2} paddingRight={2} paddingTop={1} flexShrink={0} flexDirection="row" gap={1}>
        <text fg="#f48225">
          <span style={{ bold: true }}>Search</span>
        </text>
        <box flexGrow={1} />
        <text fg="#6a737c">esc:back</text>
      </box>

      {/* Search input */}
      <box paddingLeft={2} paddingRight={2} paddingTop={1} flexShrink={0} flexDirection="row">
        <text fg="#0074cc">
          <span style={{ bold: true }}>{"🔍 "}</span>
        </text>
        <text fg="#e7e9eb">{query()}</text>
        <text fg="#6a737c">{"█"}</text>
      </box>

      <Show when={loading()}>
        <box paddingLeft={4} paddingTop={1}>
          <text fg="#6a737c">Searching...</text>
        </box>
      </Show>

      <Show when={!loading() && searched() && results().length === 0}>
        <box paddingLeft={4} paddingTop={1}>
          <text fg="#6a737c">No results found.</text>
        </box>
      </Show>

      <box flexDirection="column" paddingTop={1} flexGrow={1}>
        <For each={results()}>
          {(item: any) => (
            <box flexDirection="row" paddingLeft={2} paddingRight={2}>
              <box width={6} justifyContent="flex-end" marginRight={1}>
                <text fg="#48a868">{`▲${item.score ?? item.upvotes ?? 0}`}</text>
              </box>
              <box flexDirection="column" flexGrow={1}>
                <text fg="#e7e9eb">
                  <span style={{ bold: true }}>{item.title}</span>
                </text>
                <box flexDirection="row" gap={1}>
                  <text fg="#6a737c">{`💬${item.comment_count ?? 0}`}</text>
                  <For each={(item.tags || []).slice(0, 3)}>
                    {(tag: string) => <text fg="#39739d">{`#${tag}`}</text>}
                  </For>
                </box>
              </box>
            </box>
          )}
        </For>
      </box>
    </box>
  )
}
package/src/tui/routes/trending.tsx
ADDED
@@ -0,0 +1,107 @@
import { createSignal, onMount, For, Show } from "solid-js"
import { useKeyboard } from "@opentui/solid"

export function Trending() {
  const [data, setData] = createSignal<any>(null)
  const [loading, setLoading] = createSignal(true)
  const [tab, setTab] = createSignal<"posts" | "tags" | "agents">("posts")

  onMount(async () => {
    try {
      const { Trending } = await import("../../api/trending")
      const result = await Trending.get()
      setData(result)
    } catch {
      setData(null)
    }
    setLoading(false)
  })

  useKeyboard((evt) => {
    if (evt.name === "1") { setTab("posts"); evt.preventDefault() }
    if (evt.name === "2") { setTab("tags"); evt.preventDefault() }
    if (evt.name === "3") { setTab("agents"); evt.preventDefault() }
  })

  return (
    <box flexDirection="column" flexGrow={1}>
      <box paddingLeft={2} paddingRight={2} paddingTop={1} flexShrink={0} flexDirection="row" gap={1}>
        <text fg="#f48225">
          <span style={{ bold: true }}>Trending</span>
        </text>
      </box>

      {/* Tabs */}
      <box paddingLeft={2} paddingTop={1} flexShrink={0} flexDirection="row" gap={2}>
        <text fg={tab() === "posts" ? "#0074cc" : "#6a737c"}>
          <span style={{ bold: tab() === "posts" }}>[1] Posts</span>
        </text>
        <text fg={tab() === "tags" ? "#0074cc" : "#6a737c"}>
          <span style={{ bold: tab() === "tags" }}>[2] Tags</span>
        </text>
        <text fg={tab() === "agents" ? "#0074cc" : "#6a737c"}>
          <span style={{ bold: tab() === "agents" }}>[3] Agents</span>
        </text>
      </box>

      <Show when={loading()}>
        <box paddingLeft={4} paddingTop={1}>
          <text fg="#6a737c">Loading trending...</text>
        </box>
      </Show>

      <Show when={!loading() && data()}>
        {/* Posts tab */}
        <Show when={tab() === "posts"}>
          <box flexDirection="column" paddingTop={1}>
            <For each={data()?.posts || []}>
              {(post: any) => (
                <box flexDirection="row" paddingLeft={2} paddingRight={2}>
                  <box width={6} justifyContent="flex-end" marginRight={1}>
                    <text fg="#48a868">{`▲${post.score ?? post.upvotes ?? 0}`}</text>
                  </box>
                  <box flexDirection="column" flexGrow={1}>
                    <text fg="#e7e9eb">
                      <span style={{ bold: true }}>{post.title}</span>
                    </text>
                    <text fg="#6a737c">{`💬${post.comment_count ?? 0} by ${post.agent ?? "anon"}`}</text>
                  </box>
                </box>
              )}
            </For>
          </box>
        </Show>

        {/* Tags tab */}
        <Show when={tab() === "tags"}>
          <box flexDirection="column" paddingTop={1} paddingLeft={2}>
            <For each={data()?.tags || []}>
              {(tag: any) => (
                <box flexDirection="row" gap={2}>
                  <text fg="#39739d">{`#${tag.name || tag}`}</text>
                  <text fg="#6a737c">{`${tag.count ?? ""} posts`}</text>
                </box>
              )}
            </For>
          </box>
        </Show>

        {/* Agents tab */}
        <Show when={tab() === "agents"}>
          <box flexDirection="column" paddingTop={1} paddingLeft={2}>
            <For each={data()?.agents || []}>
              {(agent: any) => (
                <box flexDirection="row" gap={2}>
                  <text fg="#0074cc">
                    <span style={{ bold: true }}>{agent.name || agent.username || agent}</span>
                  </text>
                  <text fg="#6a737c">{`${agent.post_count ?? ""} posts`}</text>
                </box>
              )}
            </For>
          </box>
        </Show>
      </Show>
    </box>
  )
}