smoothie-code 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,185 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * blend-cli.ts — Standalone blend runner for hooks.
4
+ *
5
+ * Usage:
6
+ * node dist/blend-cli.js "Review this plan: ..."
7
+ * echo "plan text" | node dist/blend-cli.js
8
+ *
9
+ * Queries Codex + OpenRouter models in parallel, prints JSON results to stdout.
10
+ * Progress goes to stderr so it doesn't interfere with hook JSON output.
11
+ */
12
+ import { readFileSync } from 'fs';
13
+ import { fileURLToPath } from 'url';
14
+ import { dirname, join } from 'path';
15
+ import { execFile as execFileCb } from 'child_process';
16
+ import { promisify } from 'util';
17
+ import { createInterface } from 'readline';
18
// Promise-returning wrapper around child_process.execFile.
const execFile = promisify(execFileCb);
// ES modules have no __dirname; reconstruct it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Package root — this compiled file lives in dist/, so go one level up.
const PROJECT_ROOT = join(__dirname, '..');
21
+ // ---------------------------------------------------------------------------
22
+ // .env loader
23
+ // ---------------------------------------------------------------------------
24
/**
 * Minimal .env loader (no dotenv dependency).
 *
 * Reads PROJECT_ROOT/.env and copies KEY=VALUE pairs into process.env.
 * Fixes over the naive version:
 *   - splits on \r?\n so CRLF files don't leave a trailing \r in values,
 *   - skips blank lines and `#` comment lines,
 *   - never overrides variables already present in the real environment
 *     (standard dotenv behavior).
 * A missing or unreadable .env is silently ignored — it is optional.
 */
function loadEnv() {
    try {
        const env = readFileSync(join(PROJECT_ROOT, '.env'), 'utf8');
        for (const line of env.split(/\r?\n/)) {
            const trimmed = line.trim();
            if (!trimmed || trimmed.startsWith('#'))
                continue; // blank line or comment
            const [key, ...val] = trimmed.split('=');
            if (key && val.length && process.env[key.trim()] === undefined)
                process.env[key.trim()] = val.join('=').trim();
        }
    }
    catch {
        // no .env
    }
}
loadEnv();
38
+ // ---------------------------------------------------------------------------
39
+ // Model queries (same as index.ts)
40
+ // ---------------------------------------------------------------------------
41
/**
 * Run the `codex` CLI on the prompt and capture its final answer.
 *
 * Codex writes its answer to the file passed via `-o`; we read that file
 * back. Fixes over the original:
 *   - the temp name includes pid + a random suffix so concurrent blends
 *     can't collide on the same `.codex-out-<timestamp>.txt` file,
 *   - cleanup happens in `finally`, so a failed codex run no longer leaks
 *     a stale temp file in the project root.
 *
 * @param {string} prompt
 * @returns {Promise<{model: string, response: string}>} never rejects —
 *   failures are folded into the `response` text as `Error: ...`.
 */
async function queryCodex(prompt) {
    const tmpFile = join(PROJECT_ROOT, `.codex-out-${process.pid}-${Date.now()}-${Math.random().toString(36).slice(2)}.txt`);
    try {
        await execFile('codex', ['exec', '--full-auto', '-o', tmpFile, prompt], {
            timeout: 90_000,
        });
        let response;
        try {
            response = readFileSync(tmpFile, 'utf8').trim();
        }
        catch {
            // codex exited OK but produced no output file
            response = '';
        }
        return { model: 'Codex', response: response || '(empty response)' };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return { model: 'Codex', response: `Error: ${message}` };
    }
    finally {
        try {
            const { unlinkSync } = await import('fs');
            unlinkSync(tmpFile);
        }
        catch {
            // temp file was never created — nothing to clean up
        }
    }
}
63
/**
 * Query a single OpenRouter chat model.
 *
 * Aborts after 60 s via AbortController. Fixes over the original:
 *   - non-2xx statuses are surfaced as `Error: HTTP <status>` (previously
 *     they fell through to JSON parsing and came back as the misleading
 *     "No response content"; the index.js copy already does this check),
 *   - the timer is cleared in `finally`, so a rejected fetch no longer
 *     leaves a pending 60 s timeout keeping the process alive.
 *
 * @param {string} prompt
 * @param {string} modelId    OpenRouter model identifier
 * @param {string} modelLabel human-readable label used in results
 * @returns {Promise<{model: string, response: string}>} never rejects.
 */
async function queryOpenRouter(prompt, modelId, modelLabel) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), 60_000);
    try {
        const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${process.env.OPENROUTER_API_KEY}`,
                'HTTP-Referer': 'https://hotairbag.github.io/smoothie',
                'X-Title': 'Smoothie',
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                model: modelId,
                messages: [{ role: 'user', content: prompt }],
            }),
            signal: controller.signal,
        });
        if (!res.ok) {
            return { model: modelLabel, response: `Error: HTTP ${res.status} (${res.statusText})` };
        }
        const data = (await res.json());
        const text = data.choices?.[0]?.message?.content ?? 'No response content';
        return { model: modelLabel, response: text };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return { model: modelLabel, response: `Error: ${message}` };
    }
    finally {
        clearTimeout(timer);
    }
}
91
+ // ---------------------------------------------------------------------------
92
+ // Read prompt from arg or stdin
93
+ // ---------------------------------------------------------------------------
94
/**
 * Resolve the prompt text: prefer the first CLI argument, otherwise read
 * everything from stdin (one line at a time, re-joined with newlines).
 *
 * @returns {Promise<string>}
 */
async function getPrompt() {
    const arg = process.argv[2];
    if (arg)
        return arg;
    const reader = createInterface({ input: process.stdin });
    const collected = [];
    for await (const chunk of reader) {
        collected.push(chunk);
    }
    return collected.join('\n');
}
105
+ // ---------------------------------------------------------------------------
106
+ // Main
107
+ // ---------------------------------------------------------------------------
108
/**
 * CLI entry point.
 *
 * Flags:
 *   --deep   prepend a context file (GEMINI/CLAUDE/AGENTS.md) and the diff
 *            of the last three commits to the prompt.
 *
 * The prompt comes from argv[2] or stdin. Progress goes to stderr; the
 * final JSON ({ results: [...] }) goes to stdout for hook consumption.
 */
async function main() {
    const args = process.argv.slice(2);
    const deep = args.includes('--deep');
    const filteredArgs = args.filter(a => a !== '--deep');
    // Temporarily override argv so getPrompt() sees the prompt at argv[2]
    process.argv = [process.argv[0], process.argv[1], ...filteredArgs];
    const prompt = await getPrompt();
    if (!prompt.trim()) {
        process.stderr.write('blend-cli: no prompt provided\n');
        process.exit(1);
    }
    let finalPrompt = prompt;
    if (deep) {
        // First readable, non-empty context file wins.
        for (const name of ['GEMINI.md', 'CLAUDE.md', 'AGENTS.md']) {
            try {
                const content = readFileSync(join(process.cwd(), name), 'utf8');
                if (content.trim()) {
                    finalPrompt = `## Context File\n${content}\n\n## Prompt\n${prompt}`;
                    break;
                }
            }
            catch {
                // file not found, try next
            }
        }
        // Append recent changes (best-effort; skipped outside a git repo).
        try {
            const { execFileSync } = await import('child_process');
            const diff = execFileSync('git', ['diff', 'HEAD~3'], { encoding: 'utf8', maxBuffer: 100 * 1024, timeout: 10000 });
            if (diff)
                finalPrompt += `\n\n## Recent Git Diff\n${diff.slice(0, 40000)}`;
        }
        catch {
            // no git diff available
        }
    }
    let config;
    try {
        config = JSON.parse(readFileSync(join(PROJECT_ROOT, 'config.json'), 'utf8'));
    }
    catch {
        config = { openrouter_models: [] };
    }
    // Guard against a config.json that parses but lacks openrouter_models
    // (previously this threw a TypeError on .map).
    const openrouterModels = config.openrouter_models ?? [];
    const models = [
        { fn: () => queryCodex(finalPrompt), label: 'Codex' },
        ...openrouterModels.map((m) => ({
            fn: () => queryOpenRouter(finalPrompt, m.id, m.label),
            label: m.label,
        })),
    ];
    process.stderr.write('\n🧃 Smoothie blending...\n\n');
    for (const { label } of models) {
        process.stderr.write(` ⏳ ${label.padEnd(26)} waiting...\n`);
    }
    process.stderr.write('\n');
    // Fire every query in parallel; per-model start times feed the progress
    // lines. Individual rejections are converted into Error results so one
    // bad model can't sink the whole blend.
    const startTimes = {};
    const promises = models.map(({ fn, label }) => {
        startTimes[label] = Date.now();
        return fn()
            .then((result) => {
                const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
                process.stderr.write(` ✓ ${label.padEnd(26)} done (${elapsed}s)\n`);
                return result;
            })
            .catch((err) => {
                const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
                const message = err instanceof Error ? err.message : String(err);
                process.stderr.write(` ✗ ${label.padEnd(26)} failed (${elapsed}s)\n`);
                return { model: label, response: `Error: ${message}` };
            });
    });
    const results = await Promise.all(promises);
    process.stderr.write('\n ◆ All done.\n\n');
    // Output JSON to stdout (for hook consumption)
    process.stdout.write(JSON.stringify({ results }, null, 2));
}
// Surface unexpected failures with a clean message and exit code instead of
// dying with an unhandled-rejection stack trace (original called main() bare).
main().catch((err) => {
    process.stderr.write(`blend-cli: ${err instanceof Error ? err.message : String(err)}\n`);
    process.exit(1);
});
@@ -0,0 +1 @@
1
// Empty export marks this file as an ES module; it carries no runtime exports.
export {};
package/dist/index.js ADDED
@@ -0,0 +1,286 @@
1
+ import { readFileSync } from 'fs';
2
+ import { fileURLToPath } from 'url';
3
+ import { dirname, join } from 'path';
4
+ import { execFile as execFileCb, execFileSync } from 'child_process';
5
+ import { promisify } from 'util';
6
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
7
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
8
+ import { z } from 'zod';
9
// Promise-returning wrapper around child_process.execFile.
const execFile = promisify(execFileCb);
// ES modules have no __dirname; reconstruct it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Package root — this compiled file lives in dist/, so go one level up.
const PROJECT_ROOT = join(__dirname, '..');
12
+ // ---------------------------------------------------------------------------
13
+ // .env loader (no dotenv dependency)
14
+ // ---------------------------------------------------------------------------
15
/**
 * Minimal .env loader (no dotenv dependency).
 *
 * Reads PROJECT_ROOT/.env and copies KEY=VALUE pairs into process.env.
 * Fixes over the naive version:
 *   - splits on \r?\n so CRLF files don't leave a trailing \r in values,
 *   - skips blank lines and `#` comment lines,
 *   - never overrides variables already present in the real environment
 *     (standard dotenv behavior).
 */
function loadEnv() {
    try {
        const env = readFileSync(join(PROJECT_ROOT, '.env'), 'utf8');
        for (const line of env.split(/\r?\n/)) {
            const trimmed = line.trim();
            if (!trimmed || trimmed.startsWith('#'))
                continue; // blank line or comment
            const [key, ...val] = trimmed.split('=');
            if (key && val.length && process.env[key.trim()] === undefined)
                process.env[key.trim()] = val.join('=').trim();
        }
    }
    catch {
        // .env file not found or unreadable — that's fine
    }
}
loadEnv();
29
+ // ---------------------------------------------------------------------------
30
+ // Model query helpers
31
+ // ---------------------------------------------------------------------------
32
/**
 * Run the `codex` CLI on the prompt and capture its final answer.
 *
 * Codex writes its answer to the file passed via `-o`; we read that file
 * back. Fixes over the original:
 *   - the temp name includes pid + a random suffix so concurrent requests
 *     can't collide on the same `.codex-out-<timestamp>.txt` file,
 *   - cleanup happens in `finally`, so a failed codex run no longer leaks
 *     a stale temp file in the project root.
 *
 * @param {string} prompt
 * @returns {Promise<{model: string, response: string}>} never rejects —
 *   failures are folded into the `response` text as `Error: ...`.
 */
async function queryCodex(prompt) {
    const tmpFile = join(PROJECT_ROOT, `.codex-out-${process.pid}-${Date.now()}-${Math.random().toString(36).slice(2)}.txt`);
    try {
        await execFile('codex', ['exec', '--full-auto', '-o', tmpFile, prompt], {
            timeout: 90_000,
        });
        let response;
        try {
            response = readFileSync(tmpFile, 'utf8').trim();
        }
        catch {
            // codex exited OK but produced no output file
            response = '';
        }
        return { model: 'Codex', response: response || '(empty response)' };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return { model: 'Codex', response: `Error: ${message}` };
    }
    finally {
        try {
            const { unlinkSync } = await import('fs');
            unlinkSync(tmpFile);
        }
        catch {
            // temp file was never created — nothing to clean up
        }
    }
}
54
/**
 * Query a single OpenRouter chat model.
 *
 * Aborts after 60 s via AbortController; non-2xx statuses are surfaced as
 * `Error: HTTP <status>`. Fix over the original: the timer is cleared in
 * `finally`, so a rejected fetch no longer leaves a pending 60 s timeout
 * (previously clearTimeout ran only on the success path).
 *
 * @param {string} prompt
 * @param {string} modelId    OpenRouter model identifier
 * @param {string} modelLabel human-readable label used in results
 * @returns {Promise<{model: string, response: string}>} never rejects.
 */
async function queryOpenRouter(prompt, modelId, modelLabel) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), 60_000);
    try {
        const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${process.env.OPENROUTER_API_KEY}`,
                'HTTP-Referer': 'https://hotairbag.github.io/smoothie',
                'X-Title': 'Smoothie',
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                model: modelId,
                messages: [{ role: 'user', content: prompt }],
            }),
            signal: controller.signal,
        });
        if (!res.ok) {
            return { model: modelLabel, response: `Error: HTTP ${res.status} (${res.statusText})` };
        }
        const data = (await res.json());
        const text = data.choices?.[0]?.message?.content ?? 'No response content';
        return { model: modelLabel, response: text };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return { model: modelLabel, response: `Error: ${message}` };
    }
    finally {
        clearTimeout(timer);
    }
}
85
+ // ---------------------------------------------------------------------------
86
+ // Platform helpers
87
+ // ---------------------------------------------------------------------------
88
/**
 * Best-effort check that the `codex` CLI is reachable on PATH.
 *
 * Uses `which` on POSIX and `where` on Windows — the original always ran
 * `which`, which does not exist in a stock Windows shell and made this
 * always report false there.
 *
 * @returns {boolean}
 */
function isCodexInstalled() {
    try {
        const locator = process.platform === 'win32' ? 'where' : 'which';
        execFileSync(locator, ['codex'], { stdio: 'ignore' });
        return true;
    }
    catch {
        return false;
    }
}
97
/**
 * Look for a project context file in the current working directory.
 *
 * Checks GEMINI.md, CLAUDE.md, then AGENTS.md; returns the contents of the
 * first one that exists and is non-blank, or null when none qualifies.
 *
 * @returns {string | null}
 */
function findContextFile() {
    const candidates = ['GEMINI.md', 'CLAUDE.md', 'AGENTS.md'];
    for (const candidate of candidates) {
        let text;
        try {
            text = readFileSync(join(process.cwd(), candidate), 'utf8');
        }
        catch {
            continue; // unreadable or absent — try the next candidate
        }
        if (text.trim())
            return text;
    }
    return null;
}
108
/**
 * Assemble the "deep" prompt: the user prompt plus project context, kept
 * under a ~16k-token (~64k-char) budget.
 *
 * Sections in priority order: prompt, context file (GEMINI/CLAUDE/AGENTS.md),
 * git diff of the last three commits, then the git-tracked file listing with
 * sensitive-looking paths removed. Each section is added only while it fits;
 * the diff may be truncated to fill whatever room remains.
 *
 * Bug fixed: truncation used `CHAR_CAP - totalLen - 100` directly as the
 * slice end — when the prompt alone exceeded the budget that value went
 * negative, and `diff.slice(0, negative)` keeps all but the last |n| chars,
 * blowing the cap instead of enforcing it. The room is now clamped to >= 0
 * and the section is skipped entirely when no room is left.
 *
 * @param {string} prompt
 * @returns {string} sections joined by blank lines.
 */
function buildDeepContext(prompt) {
    const TOKEN_CAP = 16000;
    const CHAR_CAP = TOKEN_CAP * 4; // ~4 chars per token
    const parts = [`## Prompt\n${prompt}`];
    let totalLen = parts[0].length;
    // Context file
    const ctxFile = findContextFile();
    if (ctxFile && totalLen + ctxFile.length < CHAR_CAP) {
        parts.push(`## Context File\n${ctxFile}`);
        totalLen += ctxFile.length;
    }
    // Git diff (recent changes, capped at 100KB)
    try {
        const diff = execFileSync('git', ['diff', 'HEAD~3'], {
            encoding: 'utf8',
            maxBuffer: 100 * 1024,
            timeout: 10_000,
        });
        if (diff && totalLen + diff.length < CHAR_CAP) {
            parts.push(`## Recent Git Diff\n${diff}`);
            totalLen += diff.length;
        }
        else if (diff) {
            // Clamp so an over-budget prompt can't produce a negative slice end.
            const room = Math.max(0, CHAR_CAP - totalLen - 100);
            if (room > 0) {
                const truncated = diff.slice(0, room);
                parts.push(`## Recent Git Diff (truncated)\n${truncated}`);
                totalLen += truncated.length;
            }
        }
    }
    catch {
        // not a git repo, or git unavailable — skip the diff section
    }
    // Directory listing (git tracked files only - respects .gitignore)
    try {
        const files = execFileSync('git', ['ls-files'], {
            encoding: 'utf8',
            timeout: 5_000,
        });
        // Filter out sensitive files
        const SENSITIVE = ['.env', '.pem', '.key', 'secret', 'credential', 'token'];
        const filtered = files.split('\n').filter((f) => f && !SENSITIVE.some(s => f.toLowerCase().includes(s))).join('\n');
        if (filtered && totalLen + filtered.length < CHAR_CAP) {
            parts.push(`## Project Files\n${filtered}`);
        }
    }
    catch {
        // git unavailable — skip the file listing
    }
    return parts.join('\n\n');
}
153
+ // ---------------------------------------------------------------------------
154
+ // MCP Server
155
+ // ---------------------------------------------------------------------------
156
// Single MCP server instance; both tools below register onto it.
const server = new McpServer({ name: 'smoothie', version: '1.0.0' });
// smoothie_blend: fan the prompt out to every configured model in parallel
// and return all responses as one JSON payload for the caller to judge.
server.tool('smoothie_blend', {
    prompt: z.string().describe('The prompt to send to all models'),
    deep: z.boolean().optional().describe('Full context mode with project files and git diff'),
}, async ({ prompt, deep }) => {
    // Read config on every request so edits take effect immediately
    let config;
    try {
        config = JSON.parse(readFileSync(join(PROJECT_ROOT, 'config.json'), 'utf8'));
    }
    catch {
        // Missing/invalid config.json → no extra OpenRouter models.
        config = { openrouter_models: [] };
    }
    const finalPrompt = deep ? buildDeepContext(prompt) : prompt;
    // Build model array based on platform
    const platform = process.env.SMOOTHIE_PLATFORM || 'claude';
    const models = [];
    // Add platform-specific models.
    // Codex reviews unless it *is* the judging platform, and only when its
    // CLI is actually installed.
    if (platform !== 'codex' && isCodexInstalled()) {
        models.push({ fn: () => queryCodex(finalPrompt), label: 'Codex' });
    }
    if (platform === 'codex' || platform === 'gemini') {
        // Add Claude via OpenRouter as a reviewer (not the judge)
        models.push({
            fn: () => queryOpenRouter(finalPrompt, 'anthropic/claude-sonnet-4', 'Claude Sonnet'),
            label: 'Claude Sonnet',
        });
    }
    // Add OpenRouter models from config
    for (const m of config.openrouter_models) {
        models.push({
            fn: () => queryOpenRouter(finalPrompt, m.id, m.label),
            label: m.label,
        });
    }
    // Print initial progress — stderr only; stdout carries the MCP stdio
    // transport and must stay clean.
    process.stderr.write('\n\u{1F9C3} Smoothie blending...\n\n');
    for (const { label } of models) {
        process.stderr.write(` \u23F3 ${label.padEnd(26)} waiting...\n`);
    }
    process.stderr.write('\n');
    // Run all in parallel with progress tracking
    const startTimes = {};
    const promises = models.map(({ fn, label }) => {
        startTimes[label] = Date.now();
        return fn()
            .then((result) => {
            const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
            process.stderr.write(` \u2713 ${label.padEnd(26)} done (${elapsed}s)\n`);
            return result;
        })
            .catch((err) => {
            // Defensive: the query helpers already trap their own errors,
            // but a stray rejection must not take down the whole blend.
            const elapsed = ((Date.now() - startTimes[label]) / 1000).toFixed(1);
            const message = err instanceof Error ? err.message : String(err);
            process.stderr.write(` \u2717 ${label.padEnd(26)} failed (${elapsed}s)\n`);
            return { model: label, response: `Error: ${message}` };
        });
    });
    const results = await Promise.all(promises);
    const judgeNames = { claude: 'Claude', codex: 'Codex', gemini: 'Gemini' };
    const judgeName = judgeNames[platform] || 'the judge';
    process.stderr.write(`\n \u25C6 All done. Handing to ${judgeName}...\n\n`);
    return {
        content: [{ type: 'text', text: JSON.stringify({ results }, null, 2) }],
    };
});
222
// smoothie_estimate: rough cost preview for a blend without running it.
// Tokens are approximated as chars/4; prices come from the OpenRouter models
// endpoint. NOTE(review): only the prompt-token price is used — completion
// tokens are not estimated, so real cost will be somewhat higher.
server.tool('smoothie_estimate', {
    prompt: z.string().describe('The prompt to estimate costs for'),
    deep: z.boolean().optional().describe('Estimate for deep mode'),
}, async ({ prompt, deep }) => {
    // Re-read config per request, mirroring smoothie_blend.
    let config;
    try {
        config = JSON.parse(readFileSync(join(PROJECT_ROOT, 'config.json'), 'utf8'));
    }
    catch {
        config = { openrouter_models: [] };
    }
    const contextPayload = deep ? buildDeepContext(prompt) : prompt;
    const tokenCount = Math.ceil(contextPayload.length / 4);
    // Fetch pricing from OpenRouter.
    // Map of model id -> USD per prompt token; left empty on any failure.
    let pricingMap = {};
    try {
        const res = await fetch('https://openrouter.ai/api/v1/models', {
            headers: { Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}` },
        });
        const data = (await res.json());
        if (data.data) {
            for (const m of data.data) {
                pricingMap[m.id] = parseFloat(m.pricing?.prompt || '0');
            }
        }
    }
    catch {
        // Pricing unavailable — continue with zeros
    }
    const platform = process.env.SMOOTHIE_PLATFORM || 'claude';
    const rows = [];
    // Row layout mirrors the model selection in smoothie_blend: Codex is
    // subscription-based (cost 0); other models bill per prompt token.
    if (platform === 'claude') {
        if (isCodexInstalled()) {
            rows.push({ label: 'Codex', tokens: tokenCount, cost: 0, note: 'free (subscription)' });
        }
    }
    if (platform === 'codex' || platform === 'gemini') {
        const price = pricingMap['anthropic/claude-sonnet-4'] || 0;
        rows.push({ label: 'Claude Sonnet', tokens: tokenCount, cost: tokenCount * price });
    }
    if (platform === 'gemini' && isCodexInstalled()) {
        rows.push({ label: 'Codex', tokens: tokenCount, cost: 0, note: 'free (subscription)' });
    }
    for (const model of config.openrouter_models) {
        const price = pricingMap[model.id] || 0;
        rows.push({
            label: model.label,
            tokens: tokenCount,
            cost: tokenCount * price,
            // Flag "pricing unavailable" only when the pricing fetch failed
            // entirely, not when a single model id is simply unknown.
            note: price === 0 && Object.keys(pricingMap).length === 0 ? 'pricing unavailable' : undefined,
        });
    }
    const totalCost = rows.reduce((sum, r) => sum + (r.cost || 0), 0);
    return {
        content: [{
                type: 'text',
                text: JSON.stringify({ rows, totalCost, tokenCount, note: 'Token estimates are approximate (~4 chars/token)' }, null, 2),
            }],
    };
});
282
+ // ---------------------------------------------------------------------------
283
+ // Start
284
+ // ---------------------------------------------------------------------------
285
// Serve the MCP API over stdio; the top-level await keeps the module alive
// until the transport closes.
const transport = new StdioServerTransport();
await server.connect(transport);
@@ -0,0 +1,2 @@
1
#!/usr/bin/env node
// Executable ES-module stub; no runtime exports of its own.
export {};