smoothie-code 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.ts ADDED
@@ -0,0 +1,367 @@
1
import { execFile as execFileCb, execFileSync } from 'child_process';
import { readFileSync, unlinkSync } from 'fs';
import { tmpdir } from 'os';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import { promisify } from 'util';
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { z } from 'zod';
9
+
10
+ const execFile = promisify(execFileCb);
11
+ const __dirname = dirname(fileURLToPath(import.meta.url));
12
+ const PROJECT_ROOT = join(__dirname, '..');
13
+
14
+ // ---------------------------------------------------------------------------
15
+ // Types
16
+ // ---------------------------------------------------------------------------
17
+
18
+ interface OpenRouterModel {
19
+ id: string;
20
+ label: string;
21
+ }
22
+
23
+ interface Config {
24
+ openrouter_models: OpenRouterModel[];
25
+ }
26
+
27
+ interface ModelResult {
28
+ model: string;
29
+ response: string;
30
+ }
31
+
32
+ interface ModelEntry {
33
+ fn: () => Promise<ModelResult>;
34
+ label: string;
35
+ }
36
+
37
+ interface OpenRouterMessage {
38
+ role: string;
39
+ content: string;
40
+ }
41
+
42
+ interface OpenRouterChoice {
43
+ message: OpenRouterMessage;
44
+ }
45
+
46
+ interface OpenRouterResponse {
47
+ choices?: OpenRouterChoice[];
48
+ }
49
+
50
+ // ---------------------------------------------------------------------------
51
+ // .env loader (no dotenv dependency)
52
+ // ---------------------------------------------------------------------------
53
+ function loadEnv(): void {
54
+ try {
55
+ const env = readFileSync(join(PROJECT_ROOT, '.env'), 'utf8');
56
+ for (const line of env.split('\n')) {
57
+ const [key, ...val] = line.split('=');
58
+ if (key && val.length) process.env[key.trim()] = val.join('=').trim();
59
+ }
60
+ } catch {
61
+ // .env file not found or unreadable — that's fine
62
+ }
63
+ }
64
+ loadEnv();
65
+
66
+ // ---------------------------------------------------------------------------
67
+ // Model query helpers
68
+ // ---------------------------------------------------------------------------
69
+
70
+ async function queryCodex(prompt: string): Promise<ModelResult> {
71
+ try {
72
+ const tmpFile = join(PROJECT_ROOT, `.codex-out-${Date.now()}.txt`);
73
+ await execFile('codex', ['exec', '--full-auto', '-o', tmpFile, prompt], {
74
+ timeout: 90_000,
75
+ });
76
+ let response: string;
77
+ try {
78
+ response = readFileSync(tmpFile, 'utf8').trim();
79
+ const { unlinkSync } = await import('fs');
80
+ unlinkSync(tmpFile);
81
+ } catch {
82
+ response = '';
83
+ }
84
+ return { model: 'Codex', response: response || '(empty response)' };
85
+ } catch (err: unknown) {
86
+ const message = err instanceof Error ? err.message : String(err);
87
+ return { model: 'Codex', response: `Error: ${message}` };
88
+ }
89
+ }
90
+
91
+ async function queryOpenRouter(
92
+ prompt: string,
93
+ modelId: string,
94
+ modelLabel: string,
95
+ ): Promise<ModelResult> {
96
+ try {
97
+ const controller = new AbortController();
98
+ const timer = setTimeout(() => controller.abort(), 60_000);
99
+
100
+ const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
101
+ method: 'POST',
102
+ headers: {
103
+ 'Authorization': `Bearer ${process.env.OPENROUTER_API_KEY}`,
104
+ 'HTTP-Referer': 'https://hotairbag.github.io/smoothie',
105
+ 'X-Title': 'Smoothie',
106
+ 'Content-Type': 'application/json',
107
+ },
108
+ body: JSON.stringify({
109
+ model: modelId,
110
+ messages: [{ role: 'user', content: prompt }],
111
+ }),
112
+ signal: controller.signal,
113
+ });
114
+
115
+ clearTimeout(timer);
116
+
117
+ if (!res.ok) {
118
+ return { model: modelLabel, response: `Error: HTTP ${res.status} (${res.statusText})` };
119
+ }
120
+
121
+ const data = (await res.json()) as OpenRouterResponse;
122
+ const text = data.choices?.[0]?.message?.content ?? 'No response content';
123
+ return { model: modelLabel, response: text };
124
+ } catch (err: unknown) {
125
+ const message = err instanceof Error ? err.message : String(err);
126
+ return { model: modelLabel, response: `Error: ${message}` };
127
+ }
128
+ }
129
+
130
+ // ---------------------------------------------------------------------------
131
+ // Platform helpers
132
+ // ---------------------------------------------------------------------------
133
+
134
+ function isCodexInstalled(): boolean {
135
+ try {
136
+ execFileSync('which', ['codex'], { stdio: 'ignore' });
137
+ return true;
138
+ } catch {
139
+ return false;
140
+ }
141
+ }
142
+
143
+ function findContextFile(): string | null {
144
+ for (const name of ['GEMINI.md', 'CLAUDE.md', 'AGENTS.md']) {
145
+ try {
146
+ const content = readFileSync(join(process.cwd(), name), 'utf8');
147
+ if (content.trim()) return content;
148
+ } catch {}
149
+ }
150
+ return null;
151
+ }
152
+
153
+ function buildDeepContext(prompt: string): string {
154
+ const TOKEN_CAP = 16000;
155
+ const CHAR_CAP = TOKEN_CAP * 4; // ~4 chars per token
156
+
157
+ const parts: string[] = [`## Prompt\n${prompt}`];
158
+ let totalLen = parts[0].length;
159
+
160
+ // Context file
161
+ const ctxFile = findContextFile();
162
+ if (ctxFile && totalLen + ctxFile.length < CHAR_CAP) {
163
+ parts.push(`## Context File\n${ctxFile}`);
164
+ totalLen += ctxFile.length;
165
+ }
166
+
167
+ // Git diff (recent changes, capped at 100KB)
168
+ try {
169
+ const diff = execFileSync('git', ['diff', 'HEAD~3'], {
170
+ encoding: 'utf8',
171
+ maxBuffer: 100 * 1024,
172
+ timeout: 10_000,
173
+ });
174
+ if (diff && totalLen + diff.length < CHAR_CAP) {
175
+ parts.push(`## Recent Git Diff\n${diff}`);
176
+ totalLen += diff.length;
177
+ } else if (diff) {
178
+ const truncated = diff.slice(0, CHAR_CAP - totalLen - 100);
179
+ parts.push(`## Recent Git Diff (truncated)\n${truncated}`);
180
+ totalLen += truncated.length;
181
+ }
182
+ } catch {}
183
+
184
+ // Directory listing (git tracked files only - respects .gitignore)
185
+ try {
186
+ const files = execFileSync('git', ['ls-files'], {
187
+ encoding: 'utf8',
188
+ timeout: 5_000,
189
+ });
190
+ // Filter out sensitive files
191
+ const SENSITIVE = ['.env', '.pem', '.key', 'secret', 'credential', 'token'];
192
+ const filtered = files.split('\n').filter((f: string) =>
193
+ f && !SENSITIVE.some(s => f.toLowerCase().includes(s))
194
+ ).join('\n');
195
+ if (filtered && totalLen + filtered.length < CHAR_CAP) {
196
+ parts.push(`## Project Files\n${filtered}`);
197
+ }
198
+ } catch {}
199
+
200
+ return parts.join('\n\n');
201
+ }
202
+
203
+ // ---------------------------------------------------------------------------
204
+ // MCP Server
205
+ // ---------------------------------------------------------------------------
206
+
207
+ const server = new McpServer({ name: 'smoothie', version: '1.0.0' });
208
+
209
+ server.tool(
210
+ 'smoothie_blend',
211
+ {
212
+ prompt: z.string().describe('The prompt to send to all models'),
213
+ deep: z.boolean().optional().describe('Full context mode with project files and git diff'),
214
+ },
215
+ async ({ prompt, deep }) => {
216
+ // Read config on every request so edits take effect immediately
217
+ let config: Config;
218
+ try {
219
+ config = JSON.parse(
220
+ readFileSync(join(PROJECT_ROOT, 'config.json'), 'utf8'),
221
+ ) as Config;
222
+ } catch {
223
+ config = { openrouter_models: [] };
224
+ }
225
+
226
+ const finalPrompt = deep ? buildDeepContext(prompt) : prompt;
227
+
228
+ // Build model array based on platform
229
+ const platform = process.env.SMOOTHIE_PLATFORM || 'claude';
230
+ const models: ModelEntry[] = [];
231
+
232
+ // Add platform-specific models
233
+ if (platform !== 'codex' && isCodexInstalled()) {
234
+ models.push({ fn: () => queryCodex(finalPrompt), label: 'Codex' });
235
+ }
236
+ if (platform === 'codex' || platform === 'gemini') {
237
+ // Add Claude via OpenRouter as a reviewer (not the judge)
238
+ models.push({
239
+ fn: () => queryOpenRouter(finalPrompt, 'anthropic/claude-sonnet-4', 'Claude Sonnet'),
240
+ label: 'Claude Sonnet',
241
+ });
242
+ }
243
+
244
+ // Add OpenRouter models from config
245
+ for (const m of config.openrouter_models) {
246
+ models.push({
247
+ fn: () => queryOpenRouter(finalPrompt, m.id, m.label),
248
+ label: m.label,
249
+ });
250
+ }
251
+
252
+ // Print initial progress
253
+ process.stderr.write('\n\u{1F9C3} Smoothie blending...\n\n');
254
+ for (const { label } of models) {
255
+ process.stderr.write(` \u23F3 ${label.padEnd(26)} waiting...\n`);
256
+ }
257
+ process.stderr.write('\n');
258
+
259
+ // Run all in parallel with progress tracking
260
+ const startTimes: Record<string, number> = {};
261
+ const promises = models.map(({ fn, label }) => {
262
+ startTimes[label] = Date.now();
263
+ return fn()
264
+ .then((result: ModelResult) => {
265
+ const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
266
+ process.stderr.write(
267
+ ` \u2713 ${label.padEnd(26)} done (${elapsed}s)\n`,
268
+ );
269
+ return result;
270
+ })
271
+ .catch((err: unknown) => {
272
+ const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
273
+ const message = err instanceof Error ? err.message : String(err);
274
+ process.stderr.write(
275
+ ` \u2717 ${label.padEnd(26)} failed (${elapsed}s)\n`,
276
+ );
277
+ return { model: label, response: `Error: ${message}` } as ModelResult;
278
+ });
279
+ });
280
+
281
+ const results: ModelResult[] = await Promise.all(promises);
282
+ const judgeNames: Record<string, string> = { claude: 'Claude', codex: 'Codex', gemini: 'Gemini' };
283
+ const judgeName = judgeNames[platform] || 'the judge';
284
+ process.stderr.write(`\n \u25C6 All done. Handing to ${judgeName}...\n\n`);
285
+
286
+ return {
287
+ content: [{ type: 'text' as const, text: JSON.stringify({ results }, null, 2) }],
288
+ };
289
+ },
290
+ );
291
+
292
+ server.tool(
293
+ 'smoothie_estimate',
294
+ {
295
+ prompt: z.string().describe('The prompt to estimate costs for'),
296
+ deep: z.boolean().optional().describe('Estimate for deep mode'),
297
+ },
298
+ async ({ prompt, deep }) => {
299
+ let config: Config;
300
+ try {
301
+ config = JSON.parse(readFileSync(join(PROJECT_ROOT, 'config.json'), 'utf8')) as Config;
302
+ } catch {
303
+ config = { openrouter_models: [] };
304
+ }
305
+
306
+ const contextPayload = deep ? buildDeepContext(prompt) : prompt;
307
+ const tokenCount = Math.ceil(contextPayload.length / 4);
308
+
309
+ // Fetch pricing from OpenRouter
310
+ let pricingMap: Record<string, number> = {};
311
+ try {
312
+ const res = await fetch('https://openrouter.ai/api/v1/models', {
313
+ headers: { Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}` },
314
+ });
315
+ const data = (await res.json()) as { data?: Array<{ id: string; pricing?: { prompt?: string } }> };
316
+ if (data.data) {
317
+ for (const m of data.data) {
318
+ pricingMap[m.id] = parseFloat(m.pricing?.prompt || '0');
319
+ }
320
+ }
321
+ } catch {
322
+ // Pricing unavailable — continue with zeros
323
+ }
324
+
325
+ const platform = process.env.SMOOTHIE_PLATFORM || 'claude';
326
+ const rows: Array<{ label: string; tokens: number; cost: number; note?: string }> = [];
327
+
328
+ if (platform === 'claude') {
329
+ if (isCodexInstalled()) {
330
+ rows.push({ label: 'Codex', tokens: tokenCount, cost: 0, note: 'free (subscription)' });
331
+ }
332
+ }
333
+ if (platform === 'codex' || platform === 'gemini') {
334
+ const price = pricingMap['anthropic/claude-sonnet-4'] || 0;
335
+ rows.push({ label: 'Claude Sonnet', tokens: tokenCount, cost: tokenCount * price });
336
+ }
337
+ if (platform === 'gemini' && isCodexInstalled()) {
338
+ rows.push({ label: 'Codex', tokens: tokenCount, cost: 0, note: 'free (subscription)' });
339
+ }
340
+
341
+ for (const model of config.openrouter_models) {
342
+ const price = pricingMap[model.id] || 0;
343
+ rows.push({
344
+ label: model.label,
345
+ tokens: tokenCount,
346
+ cost: tokenCount * price,
347
+ note: price === 0 && Object.keys(pricingMap).length === 0 ? 'pricing unavailable' : undefined,
348
+ });
349
+ }
350
+
351
+ const totalCost = rows.reduce((sum, r) => sum + (r.cost || 0), 0);
352
+
353
+ return {
354
+ content: [{
355
+ type: 'text' as const,
356
+ text: JSON.stringify({ rows, totalCost, tokenCount, note: 'Token estimates are approximate (~4 chars/token)' }, null, 2),
357
+ }],
358
+ };
359
+ },
360
+ );
361
+
362
+ // ---------------------------------------------------------------------------
363
+ // Start
364
+ // ---------------------------------------------------------------------------
365
+
366
+ const transport = new StdioServerTransport();
367
+ await server.connect(transport);
@@ -0,0 +1,318 @@
1
+ #!/usr/bin/env node
2
+
3
+ import { readFileSync, writeFileSync, existsSync } from 'fs';
4
+ import { fileURLToPath } from 'url';
5
+ import { dirname, join } from 'path';
6
+ import { createInterface, Interface } from 'readline';
7
+
8
+ const __dirname = dirname(fileURLToPath(import.meta.url));
9
+ const PROJECT_ROOT = join(__dirname, '..');
10
+
11
+ interface OpenRouterModel {
12
+ id: string;
13
+ name?: string;
14
+ context_length?: number;
15
+ }
16
+
17
+ interface ModelEntry {
18
+ id: string;
19
+ label: string;
20
+ }
21
+
22
+ interface Config {
23
+ openrouter_models: ModelEntry[];
24
+ auto_blend?: boolean;
25
+ }
26
+
27
+ const FALLBACK_MODELS: ModelEntry[] = [
28
+ { id: 'google/gemini-2.5-pro-preview', label: 'Gemini 2.5 Pro' },
29
+ { id: 'deepseek/deepseek-r2', label: 'DeepSeek R2' },
30
+ { id: 'x-ai/grok-3', label: 'Grok 3' },
31
+ { id: 'qwen/qwen-2.5-coder-32b-instruct', label: 'Qwen 2.5 Coder' },
32
+ { id: 'mistralai/codestral-2501', label: 'Codestral' },
33
+ ];
34
+
35
+ const CODING_PROVIDERS = [
36
+ 'anthropic', 'google', 'openai', 'x-ai', 'deepseek',
37
+ 'qwen', 'mistralai', 'meta-llama', 'cohere', 'amazon',
38
+ ];
39
+
40
+ function loadEnv(): void {
41
+ try {
42
+ const env = readFileSync(join(PROJECT_ROOT, '.env'), 'utf8');
43
+ for (const line of env.split('\n')) {
44
+ const [key, ...val] = line.split('=');
45
+ if (key && val.length) process.env[key.trim()] = val.join('=').trim();
46
+ }
47
+ } catch {}
48
+ }
49
+
50
+ function formatLabel(model: OpenRouterModel): string {
51
+ if (model.name) return model.name;
52
+ const raw = model.id.includes('/') ? model.id.split('/').slice(1).join('/') : model.id;
53
+ return raw.replace(/-/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase());
54
+ }
55
+
56
+ function loadConfig(configPath: string): Config {
57
+ try {
58
+ return JSON.parse(readFileSync(configPath, 'utf8')) as Config;
59
+ } catch {
60
+ return { openrouter_models: [] };
61
+ }
62
+ }
63
+
64
+ function saveConfig(configPath: string, config: Config): void {
65
+ writeFileSync(configPath, JSON.stringify(config, null, 2) + '\n', 'utf8');
66
+ }
67
+
68
+ async function fetchModels(apiKey: string): Promise<OpenRouterModel[] | null> {
69
+ try {
70
+ const res = await fetch('https://openrouter.ai/api/v1/models?order=throughput', {
71
+ headers: { Authorization: `Bearer ${apiKey}` },
72
+ });
73
+ if (!res.ok) throw new Error(`HTTP ${res.status}: ${res.statusText}`);
74
+ const json = (await res.json()) as { data?: OpenRouterModel[] };
75
+ return json.data || [];
76
+ } catch (err) {
77
+ const message = err instanceof Error ? err.message : String(err);
78
+ console.warn(`\n Could not fetch models (${message}). Using defaults.\n`);
79
+ return null;
80
+ }
81
+ }
82
+
83
+ function isCodingModel(model: OpenRouterModel): boolean {
84
+ if ((model.context_length || 0) < 32000) return false;
85
+ if (model.id.includes('embed')) return false;
86
+ if (model.id.includes('vision') && !model.id.includes('omni')) return false;
87
+ const provider = model.id.includes('/') ? model.id.split('/')[0] : model.id;
88
+ return CODING_PROVIDERS.includes(provider);
89
+ }
90
+
91
+ function dedupeAndFilter(models: OpenRouterModel[]): OpenRouterModel[] {
92
+ const coding = models.filter(isCodingModel);
93
+
94
+ // Deduplicate by provider family
95
+ const seen = new Set<string>();
96
+ const deduped: OpenRouterModel[] = [];
97
+ for (const m of coding) {
98
+ const provider = m.id.includes('/') ? m.id.split('/')[0] : m.id;
99
+ if (seen.has(provider)) continue;
100
+ seen.add(provider);
101
+ deduped.push(m);
102
+ }
103
+
104
+ return deduped.slice(0, 8);
105
+ }
106
+
107
+ async function lookupModel(apiKey: string, modelId: string): Promise<ModelEntry | null> {
108
+ try {
109
+ const res = await fetch('https://openrouter.ai/api/v1/models?order=throughput', {
110
+ headers: { Authorization: `Bearer ${apiKey}` },
111
+ });
112
+ if (!res.ok) return null;
113
+ const json = (await res.json()) as { data?: OpenRouterModel[] };
114
+ const found = json.data?.find((m) => m.id === modelId);
115
+ if (found) return { id: found.id, label: formatLabel(found) };
116
+ // If not found but looks like a valid ID, accept it anyway
117
+ if (modelId.includes('/')) {
118
+ return { id: modelId, label: formatLabel({ id: modelId }) };
119
+ }
120
+ return null;
121
+ } catch {
122
+ if (modelId.includes('/')) {
123
+ return { id: modelId, label: formatLabel({ id: modelId }) };
124
+ }
125
+ return null;
126
+ }
127
+ }
128
+
129
+ function promptQ(rl: Interface, question: string): Promise<string> {
130
+ return new Promise((resolve) => {
131
+ rl.question(question, (answer: string) => resolve(answer));
132
+ });
133
+ }
134
+
135
+ // ── CLI: add <model_id> ─────────────────────────────────────────────
136
+ async function cmdAdd(apiKey: string, configPath: string, modelId: string): Promise<void> {
137
+ const config = loadConfig(configPath);
138
+ if (config.openrouter_models.some((m) => m.id === modelId)) {
139
+ console.log(` Already added: ${modelId}`);
140
+ return;
141
+ }
142
+ const entry = await lookupModel(apiKey, modelId);
143
+ if (!entry) {
144
+ console.error(` Could not find model: ${modelId}`);
145
+ process.exit(1);
146
+ }
147
+ config.openrouter_models.push(entry);
148
+ saveConfig(configPath, config);
149
+ console.log(` ✓ Added ${entry.label} (${entry.id})`);
150
+ }
151
+
152
+ // ── CLI: remove <model_id> ──────────────────────────────────────────
153
+ function cmdRemove(configPath: string, modelId: string): void {
154
+ const config = loadConfig(configPath);
155
+ const before = config.openrouter_models.length;
156
+ config.openrouter_models = config.openrouter_models.filter((m) => m.id !== modelId);
157
+ if (config.openrouter_models.length === before) {
158
+ console.log(` Not found: ${modelId}`);
159
+ return;
160
+ }
161
+ saveConfig(configPath, config);
162
+ console.log(` ✓ Removed ${modelId}`);
163
+ }
164
+
165
+ // ── CLI: list ───────────────────────────────────────────────────────
166
+ function cmdList(configPath: string): void {
167
+ const config = loadConfig(configPath);
168
+ if (config.openrouter_models.length === 0) {
169
+ console.log(' No models configured. Run: node dist/select-models.js');
170
+ return;
171
+ }
172
+ console.log(' Current models:');
173
+ for (const m of config.openrouter_models) {
174
+ console.log(` ${m.label} (${m.id})`);
175
+ }
176
+ }
177
+
178
+ // ── Interactive picker (default / install mode) ─────────────────────
179
+ async function cmdPick(apiKey: string, configPath: string): Promise<void> {
180
+ let topModels: ModelEntry[];
181
+ const rawModels = await fetchModels(apiKey);
182
+
183
+ if (rawModels === null) {
184
+ topModels = FALLBACK_MODELS.map((m) => ({ ...m }));
185
+ } else {
186
+ const deduped = dedupeAndFilter(rawModels);
187
+ topModels =
188
+ deduped.length > 0
189
+ ? deduped.map((m) => ({ id: m.id, label: formatLabel(m) }))
190
+ : FALLBACK_MODELS.map((m) => ({ ...m }));
191
+ }
192
+
193
+ // Exclude the judge's own model family
194
+ const platform = process.env.SMOOTHIE_PLATFORM || 'claude';
195
+ const excludePrefixes: Record<string, string[]> = {
196
+ claude: ['anthropic'],
197
+ codex: ['openai'],
198
+ gemini: ['google'],
199
+ };
200
+ const excluded = excludePrefixes[platform] || [];
201
+ topModels = topModels.filter(m => !excluded.some(prefix => m.id.startsWith(prefix + '/')));
202
+
203
+ // Default selection: first 3
204
+ const selected = new Set([0, 1, 2]);
205
+
206
+ // Print list with selection markers
207
+ console.log('');
208
+ for (let i = 0; i < topModels.length; i++) {
209
+ const check = selected.has(i) ? '\x1b[32m✓\x1b[0m' : ' ';
210
+ const num = String(i + 1).padStart(2, ' ');
211
+ console.log(` ${check} ${num}. ${topModels[i].label}`);
212
+ }
213
+ console.log('');
214
+
215
+ const rl = createInterface({ input: process.stdin, output: process.stdout });
216
+
217
+ const answer = await promptQ(
218
+ rl,
219
+ ' Toggle numbers, paste model ID, or Enter to confirm: ',
220
+ );
221
+ rl.close();
222
+
223
+ const input = answer.trim();
224
+
225
+ // Collect any pasted model IDs (contain '/')
226
+ const pastedIds: ModelEntry[] = [];
227
+ const toggleNums: number[] = [];
228
+
229
+ if (input) {
230
+ for (const token of input.split(/\s+/)) {
231
+ if (token.includes('/')) {
232
+ // Looks like a model ID
233
+ const entry = await lookupModel(apiKey, token);
234
+ if (entry) {
235
+ pastedIds.push(entry);
236
+ console.log(` ✓ Added ${entry.label}`);
237
+ } else {
238
+ console.log(` ✗ Unknown: ${token}`);
239
+ }
240
+ } else {
241
+ const n = parseInt(token, 10);
242
+ if (n >= 1 && n <= topModels.length) toggleNums.push(n);
243
+ }
244
+ }
245
+ }
246
+
247
+ // If user typed numbers, use exactly those (not toggle, just select)
248
+ let finalSelection: ModelEntry[];
249
+ if (toggleNums.length > 0) {
250
+ finalSelection = toggleNums.map((n) => topModels[n - 1]);
251
+ } else if (pastedIds.length > 0) {
252
+ // Keep defaults + add pasted
253
+ finalSelection = [...selected].map((i) => topModels[i]);
254
+ } else {
255
+ // Enter with no input → use defaults
256
+ finalSelection = [...selected].map((i) => topModels[i]);
257
+ }
258
+
259
+ // Merge pasted IDs
260
+ for (const p of pastedIds) {
261
+ if (!finalSelection.some((m) => m.id === p.id)) {
262
+ finalSelection.push(p);
263
+ }
264
+ }
265
+
266
+ // Preserve existing config fields (like auto_blend)
267
+ const existing = loadConfig(configPath);
268
+ existing.openrouter_models = finalSelection;
269
+ saveConfig(configPath, existing);
270
+
271
+ console.log(` ✓ ${finalSelection.map((m) => m.label).join(', ')}`);
272
+ }
273
+
274
+ // ── Main ────────────────────────────────────────────────────────────
275
+ async function main(): Promise<void> {
276
+ const args = process.argv.slice(2);
277
+ loadEnv();
278
+
279
+ const apiKey = process.env.OPENROUTER_API_KEY || '';
280
+ const configPath =
281
+ args.find((a) => a.endsWith('.json')) || join(PROJECT_ROOT, 'config.json');
282
+
283
+ // Subcommands
284
+ if (args[0] === 'add' && args[1]) {
285
+ if (!apiKey) {
286
+ console.error(' Set OPENROUTER_API_KEY in .env first');
287
+ process.exit(1);
288
+ }
289
+ await cmdAdd(apiKey, configPath, args[1]);
290
+ return;
291
+ }
292
+
293
+ if (args[0] === 'remove' && args[1]) {
294
+ cmdRemove(configPath, args[1]);
295
+ return;
296
+ }
297
+
298
+ if (args[0] === 'list') {
299
+ cmdList(configPath);
300
+ return;
301
+ }
302
+
303
+ // Interactive picker (default behavior / install mode)
304
+ const key = args[0] && !args[0].endsWith('.json') ? args[0] : apiKey;
305
+ if (!key) {
306
+ console.error(' No API key. Usage: node dist/select-models.js <key>');
307
+ console.error(' Or set OPENROUTER_API_KEY in .env');
308
+ process.exit(1);
309
+ }
310
+ // If called with positional key, set it for fetching
311
+ if (args[0] && !args[0].endsWith('.json') && args[0] !== 'add' && args[0] !== 'remove' && args[0] !== 'list') {
312
+ process.env.OPENROUTER_API_KEY = args[0];
313
+ }
314
+
315
+ await cmdPick(key, configPath);
316
+ }
317
+
318
+ main();
package/tsconfig.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "Node16",
5
+ "moduleResolution": "Node16",
6
+ "outDir": "dist",
7
+ "rootDir": "src",
8
+ "strict": true,
9
+ "esModuleInterop": true,
10
+ "skipLibCheck": true,
11
+ "declaration": true,
12
+ "sourceMap": false
13
+ }
14
+ }