@projectservan8n/cnapse 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ConfigUI-V5TM6KKS.js +306 -0
- package/dist/index.js +392 -176
- package/package.json +1 -1
- package/src/components/ConfigUI.tsx +353 -0
- package/src/components/ProviderSelector.tsx +270 -68
- package/src/index.tsx +95 -83
- package/src/lib/ollama.ts +140 -0
package/src/components/ConfigUI.tsx
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
1
|
+
/**
 * Interactive Configuration UI
 * - Select provider
 * - Enter API key (for non-Ollama)
 * - Select model from recommended list
 * - For Ollama: runs the model to ensure it's ready
 */

import React, { useState, useEffect } from 'react';
import { Box, Text, useInput, useApp } from 'ink';
import TextInput from 'ink-text-input';
import Spinner from 'ink-spinner';
import { getConfig, setProvider, setModel, setApiKey } from '../lib/config.js';
import { exec } from 'child_process';
import { promisify } from 'util';

// Promise-based wrapper around child_process.exec; used to shell out to the
// `ollama` CLI during the Ollama readiness check below.
const execAsync = promisify(exec);
|
|
18
|
+
|
|
19
|
+
// Wizard steps, advanced in order. 'apiKey' is skipped when a key is already
// saved; 'ollamaCheck' only runs for the Ollama provider.
type Step = 'provider' | 'apiKey' | 'model' | 'ollamaCheck' | 'done';

// Static description of one selectable AI provider and its curated model list.
interface ProviderConfig {
  // Stable identifier persisted via setProvider(); also keys config.apiKeys
  // for the three hosted providers.
  id: 'ollama' | 'openrouter' | 'anthropic' | 'openai';
  // Display name shown in the picker.
  name: string;
  // One-line blurb rendered under the highlighted row.
  description: string;
  // true => the wizard inserts the 'apiKey' step when no key is saved yet.
  needsApiKey: boolean;
  models: Array<{
    // Provider-specific model identifier passed to setModel().
    id: string;
    // Display name shown in the model list.
    name: string;
    // One-line blurb rendered under the highlighted model.
    description: string;
    // Marks the default pre-selected model (rendered with a yellow '*').
    recommended?: boolean;
  }>;
}
|
|
33
|
+
|
|
34
|
+
// Curated provider catalog shown by the wizard. Array order is display order;
// each provider should mark exactly one model `recommended: true` — it becomes
// the cursor's starting position in the model step.
const PROVIDERS: ProviderConfig[] = [
  {
    id: 'ollama',
    name: 'Ollama',
    description: 'Local AI - Free, private, runs on your PC',
    needsApiKey: false,
    models: [
      { id: 'qwen2.5:0.5b', name: 'Qwen 2.5 0.5B', description: 'Ultra fast, good for tasks', recommended: true },
      { id: 'qwen2.5:1.5b', name: 'Qwen 2.5 1.5B', description: 'Fast, better quality' },
      { id: 'qwen2.5:7b', name: 'Qwen 2.5 7B', description: 'Best quality, needs 8GB+ RAM' },
      { id: 'llama3.2:1b', name: 'Llama 3.2 1B', description: 'Fast, good general use' },
      { id: 'llama3.2:3b', name: 'Llama 3.2 3B', description: 'Balanced speed/quality' },
      { id: 'codellama:7b', name: 'Code Llama 7B', description: 'Best for coding tasks' },
      { id: 'llava:7b', name: 'LLaVA 7B', description: 'Vision model - can see images' },
    ],
  },
  {
    id: 'openrouter',
    name: 'OpenRouter',
    description: 'Many models, pay-per-use, great value',
    needsApiKey: true,
    models: [
      { id: 'qwen/qwen-2.5-coder-32b-instruct', name: 'Qwen 2.5 Coder 32B', description: 'Best for coding, very cheap', recommended: true },
      { id: 'anthropic/claude-3.5-sonnet', name: 'Claude 3.5 Sonnet', description: 'Best overall quality' },
      { id: 'openai/gpt-4o', name: 'GPT-4o', description: 'Fast, multimodal' },
      { id: 'openai/gpt-4o-mini', name: 'GPT-4o Mini', description: 'Cheap, fast, good quality' },
      { id: 'google/gemini-pro-1.5', name: 'Gemini Pro 1.5', description: 'Long context, fast' },
      { id: 'meta-llama/llama-3.1-70b-instruct', name: 'Llama 3.1 70B', description: 'Open source, powerful' },
    ],
  },
  {
    id: 'anthropic',
    name: 'Anthropic',
    description: 'Claude models - Best for complex reasoning',
    needsApiKey: true,
    models: [
      { id: 'claude-3-5-sonnet-20241022', name: 'Claude 3.5 Sonnet', description: 'Best balance of speed/quality', recommended: true },
      { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus', description: 'Most capable, slower' },
      { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku', description: 'Fastest, cheapest' },
    ],
  },
  {
    id: 'openai',
    name: 'OpenAI',
    description: 'GPT models - Well-known, reliable',
    needsApiKey: true,
    models: [
      { id: 'gpt-4o', name: 'GPT-4o', description: 'Latest, multimodal', recommended: true },
      { id: 'gpt-4o-mini', name: 'GPT-4o Mini', description: 'Fast and cheap' },
      { id: 'gpt-4-turbo', name: 'GPT-4 Turbo', description: 'Previous best' },
      { id: 'gpt-3.5-turbo', name: 'GPT-3.5 Turbo', description: 'Legacy, very cheap' },
    ],
  },
];
|
|
88
|
+
|
|
89
|
+
export function ConfigUI() {
|
|
90
|
+
const { exit } = useApp();
|
|
91
|
+
const config = getConfig();
|
|
92
|
+
|
|
93
|
+
const [step, setStep] = useState<Step>('provider');
|
|
94
|
+
const [providerIndex, setProviderIndex] = useState(() => {
|
|
95
|
+
const idx = PROVIDERS.findIndex(p => p.id === config.provider);
|
|
96
|
+
return idx >= 0 ? idx : 0;
|
|
97
|
+
});
|
|
98
|
+
const [modelIndex, setModelIndex] = useState(0);
|
|
99
|
+
const [apiKeyInput, setApiKeyInput] = useState('');
|
|
100
|
+
const [selectedProvider, setSelectedProvider] = useState<ProviderConfig | null>(null);
|
|
101
|
+
const [ollamaStatus, setOllamaStatus] = useState<'checking' | 'pulling' | 'running' | 'ready' | 'error'>('checking');
|
|
102
|
+
const [ollamaMessage, setOllamaMessage] = useState('');
|
|
103
|
+
|
|
104
|
+
// Handle keyboard input
|
|
105
|
+
useInput((input, key) => {
|
|
106
|
+
if (key.escape) {
|
|
107
|
+
exit();
|
|
108
|
+
return;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
if (step === 'provider') {
|
|
112
|
+
if (key.upArrow) {
|
|
113
|
+
setProviderIndex(prev => (prev > 0 ? prev - 1 : PROVIDERS.length - 1));
|
|
114
|
+
} else if (key.downArrow) {
|
|
115
|
+
setProviderIndex(prev => (prev < PROVIDERS.length - 1 ? prev + 1 : 0));
|
|
116
|
+
} else if (key.return) {
|
|
117
|
+
const provider = PROVIDERS[providerIndex]!;
|
|
118
|
+
setSelectedProvider(provider);
|
|
119
|
+
setProvider(provider.id);
|
|
120
|
+
|
|
121
|
+
// Find recommended model index
|
|
122
|
+
const recommendedIdx = provider.models.findIndex(m => m.recommended);
|
|
123
|
+
setModelIndex(recommendedIdx >= 0 ? recommendedIdx : 0);
|
|
124
|
+
|
|
125
|
+
if (provider.needsApiKey) {
|
|
126
|
+
const apiKeyProvider = provider.id as 'openrouter' | 'anthropic' | 'openai';
|
|
127
|
+
if (!config.apiKeys[apiKeyProvider]) {
|
|
128
|
+
setStep('apiKey');
|
|
129
|
+
} else {
|
|
130
|
+
setStep('model');
|
|
131
|
+
}
|
|
132
|
+
} else {
|
|
133
|
+
setStep('model');
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
} else if (step === 'model' && selectedProvider) {
|
|
137
|
+
if (key.upArrow) {
|
|
138
|
+
setModelIndex(prev => (prev > 0 ? prev - 1 : selectedProvider.models.length - 1));
|
|
139
|
+
} else if (key.downArrow) {
|
|
140
|
+
setModelIndex(prev => (prev < selectedProvider.models.length - 1 ? prev + 1 : 0));
|
|
141
|
+
} else if (key.return) {
|
|
142
|
+
const model = selectedProvider.models[modelIndex]!;
|
|
143
|
+
setModel(model.id);
|
|
144
|
+
|
|
145
|
+
if (selectedProvider.id === 'ollama') {
|
|
146
|
+
setStep('ollamaCheck');
|
|
147
|
+
} else {
|
|
148
|
+
setStep('done');
|
|
149
|
+
setTimeout(() => exit(), 2000);
|
|
150
|
+
}
|
|
151
|
+
} else if (key.leftArrow || input === 'b') {
|
|
152
|
+
setStep('provider');
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
// Handle API key submission
|
|
158
|
+
const handleApiKeySubmit = (value: string) => {
|
|
159
|
+
if (value.trim() && selectedProvider) {
|
|
160
|
+
setApiKey(selectedProvider.id as 'openrouter' | 'anthropic' | 'openai', value.trim());
|
|
161
|
+
setStep('model');
|
|
162
|
+
}
|
|
163
|
+
};
|
|
164
|
+
|
|
165
|
+
// Ollama model check and run
|
|
166
|
+
useEffect(() => {
|
|
167
|
+
if (step !== 'ollamaCheck' || !selectedProvider) return;
|
|
168
|
+
|
|
169
|
+
const modelId = selectedProvider.models[modelIndex]!.id;
|
|
170
|
+
|
|
171
|
+
async function checkAndRunOllama() {
|
|
172
|
+
try {
|
|
173
|
+
// Check if Ollama is running
|
|
174
|
+
setOllamaStatus('checking');
|
|
175
|
+
setOllamaMessage('Checking Ollama...');
|
|
176
|
+
|
|
177
|
+
try {
|
|
178
|
+
await execAsync('ollama list', { timeout: 5000 });
|
|
179
|
+
} catch {
|
|
180
|
+
setOllamaStatus('error');
|
|
181
|
+
setOllamaMessage('Ollama not found. Install from https://ollama.ai');
|
|
182
|
+
setTimeout(() => exit(), 3000);
|
|
183
|
+
return;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
// Check if model exists
|
|
187
|
+
const { stdout } = await execAsync('ollama list');
|
|
188
|
+
const modelName = modelId.split(':')[0];
|
|
189
|
+
const hasModel = stdout.toLowerCase().includes(modelName!.toLowerCase());
|
|
190
|
+
|
|
191
|
+
if (!hasModel) {
|
|
192
|
+
setOllamaStatus('pulling');
|
|
193
|
+
setOllamaMessage(`Downloading ${modelId}... (this may take a few minutes)`);
|
|
194
|
+
|
|
195
|
+
// Pull the model
|
|
196
|
+
await execAsync(`ollama pull ${modelId}`, { timeout: 600000 }); // 10 min timeout
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
// Run the model to load it into memory
|
|
200
|
+
setOllamaStatus('running');
|
|
201
|
+
setOllamaMessage(`Starting ${modelId}...`);
|
|
202
|
+
|
|
203
|
+
// Send a simple request to load the model
|
|
204
|
+
await execAsync(`ollama run ${modelId} "Hello" --nowordwrap`, { timeout: 120000 });
|
|
205
|
+
|
|
206
|
+
setOllamaStatus('ready');
|
|
207
|
+
setOllamaMessage(`${modelId} is ready!`);
|
|
208
|
+
setStep('done');
|
|
209
|
+
setTimeout(() => exit(), 2000);
|
|
210
|
+
|
|
211
|
+
} catch (err) {
|
|
212
|
+
setOllamaStatus('error');
|
|
213
|
+
setOllamaMessage(`Error: ${err instanceof Error ? err.message : 'Unknown error'}`);
|
|
214
|
+
setTimeout(() => exit(), 3000);
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
checkAndRunOllama();
|
|
219
|
+
}, [step, selectedProvider, modelIndex, exit]);
|
|
220
|
+
|
|
221
|
+
return (
|
|
222
|
+
<Box flexDirection="column" padding={1}>
|
|
223
|
+
<Box marginBottom={1}>
|
|
224
|
+
<Text bold color="cyan">C-napse Configuration</Text>
|
|
225
|
+
</Box>
|
|
226
|
+
|
|
227
|
+
{/* Provider Selection */}
|
|
228
|
+
{step === 'provider' && (
|
|
229
|
+
<Box flexDirection="column">
|
|
230
|
+
<Text bold>Select AI Provider:</Text>
|
|
231
|
+
<Text color="gray" dimColor>(Use arrows, Enter to select, Esc to cancel)</Text>
|
|
232
|
+
<Box marginTop={1} flexDirection="column">
|
|
233
|
+
{PROVIDERS.map((p, i) => {
|
|
234
|
+
const isSelected = i === providerIndex;
|
|
235
|
+
const isCurrent = p.id === config.provider;
|
|
236
|
+
return (
|
|
237
|
+
<Box key={p.id} flexDirection="column">
|
|
238
|
+
<Text color={isSelected ? 'cyan' : 'white'}>
|
|
239
|
+
{isSelected ? '❯ ' : ' '}
|
|
240
|
+
<Text bold={isSelected}>{p.name}</Text>
|
|
241
|
+
{isCurrent && <Text color="green"> (current)</Text>}
|
|
242
|
+
{p.needsApiKey && p.id !== 'ollama' && config.apiKeys[p.id as 'openrouter' | 'anthropic' | 'openai'] && <Text color="yellow"> (key saved)</Text>}
|
|
243
|
+
</Text>
|
|
244
|
+
{isSelected && (
|
|
245
|
+
<Text color="gray"> {p.description}</Text>
|
|
246
|
+
)}
|
|
247
|
+
</Box>
|
|
248
|
+
);
|
|
249
|
+
})}
|
|
250
|
+
</Box>
|
|
251
|
+
</Box>
|
|
252
|
+
)}
|
|
253
|
+
|
|
254
|
+
{/* API Key Input */}
|
|
255
|
+
{step === 'apiKey' && selectedProvider && (
|
|
256
|
+
<Box flexDirection="column">
|
|
257
|
+
<Text><Text color="green">✓</Text> Provider: <Text bold>{selectedProvider.name}</Text></Text>
|
|
258
|
+
<Box marginTop={1} flexDirection="column">
|
|
259
|
+
<Text bold>Enter your {selectedProvider.name} API key:</Text>
|
|
260
|
+
<Text color="gray" dimColor>
|
|
261
|
+
{selectedProvider.id === 'openrouter' && 'Get key at: https://openrouter.ai/keys'}
|
|
262
|
+
{selectedProvider.id === 'anthropic' && 'Get key at: https://console.anthropic.com'}
|
|
263
|
+
{selectedProvider.id === 'openai' && 'Get key at: https://platform.openai.com/api-keys'}
|
|
264
|
+
</Text>
|
|
265
|
+
<Box marginTop={1}>
|
|
266
|
+
<Text color="cyan">❯ </Text>
|
|
267
|
+
<TextInput
|
|
268
|
+
value={apiKeyInput}
|
|
269
|
+
onChange={setApiKeyInput}
|
|
270
|
+
onSubmit={handleApiKeySubmit}
|
|
271
|
+
mask="*"
|
|
272
|
+
/>
|
|
273
|
+
</Box>
|
|
274
|
+
</Box>
|
|
275
|
+
</Box>
|
|
276
|
+
)}
|
|
277
|
+
|
|
278
|
+
{/* Model Selection */}
|
|
279
|
+
{step === 'model' && selectedProvider && (
|
|
280
|
+
<Box flexDirection="column">
|
|
281
|
+
<Text><Text color="green">✓</Text> Provider: <Text bold>{selectedProvider.name}</Text></Text>
|
|
282
|
+
{selectedProvider.needsApiKey && (
|
|
283
|
+
<Text><Text color="green">✓</Text> API Key: <Text bold>configured</Text></Text>
|
|
284
|
+
)}
|
|
285
|
+
<Box marginTop={1} flexDirection="column">
|
|
286
|
+
<Text bold>Select Model:</Text>
|
|
287
|
+
<Text color="gray" dimColor>(Arrows to navigate, Enter to select, B to go back)</Text>
|
|
288
|
+
<Box marginTop={1} flexDirection="column">
|
|
289
|
+
{selectedProvider.models.map((model, i) => {
|
|
290
|
+
const isSelected = i === modelIndex;
|
|
291
|
+
const isCurrent = model.id === config.model && selectedProvider.id === config.provider;
|
|
292
|
+
return (
|
|
293
|
+
<Box key={model.id} flexDirection="column">
|
|
294
|
+
<Text color={isSelected ? 'cyan' : 'white'}>
|
|
295
|
+
{isSelected ? '❯ ' : ' '}
|
|
296
|
+
<Text bold={isSelected}>{model.name}</Text>
|
|
297
|
+
{model.recommended && <Text color="yellow"> *</Text>}
|
|
298
|
+
{isCurrent && <Text color="green"> (current)</Text>}
|
|
299
|
+
</Text>
|
|
300
|
+
{isSelected && (
|
|
301
|
+
<Text color="gray"> {model.description}</Text>
|
|
302
|
+
)}
|
|
303
|
+
</Box>
|
|
304
|
+
);
|
|
305
|
+
})}
|
|
306
|
+
</Box>
|
|
307
|
+
<Box marginTop={1}>
|
|
308
|
+
<Text color="gray" dimColor>* = Recommended for C-napse</Text>
|
|
309
|
+
</Box>
|
|
310
|
+
</Box>
|
|
311
|
+
</Box>
|
|
312
|
+
)}
|
|
313
|
+
|
|
314
|
+
{/* Ollama Check */}
|
|
315
|
+
{step === 'ollamaCheck' && selectedProvider && (
|
|
316
|
+
<Box flexDirection="column">
|
|
317
|
+
<Text><Text color="green">✓</Text> Provider: <Text bold>{selectedProvider.name}</Text></Text>
|
|
318
|
+
<Text><Text color="green">✓</Text> Model: <Text bold>{selectedProvider.models[modelIndex]?.name}</Text></Text>
|
|
319
|
+
<Box marginTop={1}>
|
|
320
|
+
{ollamaStatus === 'error' ? (
|
|
321
|
+
<Text color="red">✗ {ollamaMessage}</Text>
|
|
322
|
+
) : ollamaStatus === 'ready' ? (
|
|
323
|
+
<Text color="green">✓ {ollamaMessage}</Text>
|
|
324
|
+
) : (
|
|
325
|
+
<Text>
|
|
326
|
+
<Text color="cyan"><Spinner type="dots" /></Text>
|
|
327
|
+
{' '}{ollamaMessage}
|
|
328
|
+
</Text>
|
|
329
|
+
)}
|
|
330
|
+
</Box>
|
|
331
|
+
</Box>
|
|
332
|
+
)}
|
|
333
|
+
|
|
334
|
+
{/* Done */}
|
|
335
|
+
{step === 'done' && selectedProvider && (
|
|
336
|
+
<Box flexDirection="column">
|
|
337
|
+
<Text><Text color="green">✓</Text> Provider: <Text bold>{selectedProvider.name}</Text></Text>
|
|
338
|
+
{selectedProvider.needsApiKey && (
|
|
339
|
+
<Text><Text color="green">✓</Text> API Key: <Text bold>configured</Text></Text>
|
|
340
|
+
)}
|
|
341
|
+
<Text><Text color="green">✓</Text> Model: <Text bold>{selectedProvider.models[modelIndex]?.name}</Text></Text>
|
|
342
|
+
<Box marginTop={1}>
|
|
343
|
+
<Text color="green" bold>Configuration saved! Run `cnapse` to start.</Text>
|
|
344
|
+
</Box>
|
|
345
|
+
</Box>
|
|
346
|
+
)}
|
|
347
|
+
|
|
348
|
+
<Box marginTop={2}>
|
|
349
|
+
<Text color="gray" dimColor>Press Esc to cancel</Text>
|
|
350
|
+
</Box>
|
|
351
|
+
</Box>
|
|
352
|
+
);
|
|
353
|
+
}
|