@projectservan8n/cnapse 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ConfigUI-V5TM6KKS.js +306 -0
- package/dist/index.js +392 -176
- package/package.json +1 -1
- package/src/components/ConfigUI.tsx +353 -0
- package/src/components/ProviderSelector.tsx +270 -68
- package/src/index.tsx +95 -83
- package/src/lib/ollama.ts +140 -0
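Note: the new module package/src/lib/ollama.ts (+140 lines) is not expanded in this diff view, but ProviderSelector.tsx below depends on its exports (checkOllamaStatus, hasModel, and the OllamaStatus type). The following is a minimal sketch of that surface, inferred only from how the component uses it; the /api/tags endpoint, the default host, and the name-matching rule are assumptions for illustration, not the package's actual implementation.

// Sketch only: the field names (installed, running, models, error) mirror how
// ProviderSelector reads the status; everything else is illustrative.
export interface OllamaStatus {
  installed: boolean;   // an Ollama install appears to be present
  running: boolean;     // the local server answered
  models: string[];     // locally pulled model tags, e.g. 'qwen2.5:0.5b'
  error?: string;       // human-readable failure message
}

// Assumed default; the real module likely reads config.ollamaHost instead.
const DEFAULT_HOST = 'http://localhost:11434';

export async function checkOllamaStatus(host = DEFAULT_HOST): Promise<OllamaStatus> {
  try {
    // Ollama's public HTTP API lists installed models at GET /api/tags.
    const res = await fetch(`${host}/api/tags`);
    if (!res.ok) {
      return { installed: true, running: false, models: [], error: `Ollama responded with HTTP ${res.status}` };
    }
    const data = (await res.json()) as { models?: Array<{ name: string }> };
    return { installed: true, running: true, models: (data.models ?? []).map(m => m.name) };
  } catch {
    // Connection refused: treat as not running (and possibly not installed).
    return { installed: false, running: false, models: [], error: `Could not reach Ollama at ${host}` };
  }
}

export function hasModel(status: OllamaStatus, modelId: string): boolean {
  // Assumed matching rule: exact tag match, or same base name ignoring the tag.
  return status.models.some(name => name === modelId || name.split(':')[0] === modelId.split(':')[0]);
}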
package/src/components/ProviderSelector.tsx
CHANGED
@@ -1,69 +1,122 @@
-
+/**
+ * Provider Selector - Used in TUI for /provider command
+ * - Shows provider list with API key status
+ * - Prompts for API key if needed
+ * - Shows model list with recommendations
+ * - For Ollama: shows model availability status
+ */
+
 import React, { useState, useEffect } from 'react';
 import { Box, Text, useInput } from 'ink';
-import
+import TextInput from 'ink-text-input';
+import Spinner from 'ink-spinner';
+import { getConfig, setProvider, setModel, setApiKey } from '../lib/config.js';
+import { checkOllamaStatus, hasModel, OllamaStatus } from '../lib/ollama.js';
 
 interface ProviderSelectorProps {
   onClose: () => void;
   onSelect: (provider: string, model: string) => void;
 }
 
-interface
+interface ModelConfig {
+  id: string;
+  name: string;
+  recommended?: boolean;
+}
+
+interface ProviderConfig {
   id: 'ollama' | 'openrouter' | 'anthropic' | 'openai';
   name: string;
   description: string;
-
-  models:
+  needsApiKey: boolean;
+  models: ModelConfig[];
 }
 
-const PROVIDERS:
+const PROVIDERS: ProviderConfig[] = [
   {
     id: 'ollama',
     name: 'Ollama',
-    description: 'Local AI - Free, private
-
-    models: [
+    description: 'Local AI - Free, private',
+    needsApiKey: false,
+    models: [
+      { id: 'qwen2.5:0.5b', name: 'Qwen 2.5 0.5B (fast)', recommended: true },
+      { id: 'qwen2.5:1.5b', name: 'Qwen 2.5 1.5B' },
+      { id: 'qwen2.5:7b', name: 'Qwen 2.5 7B (quality)' },
+      { id: 'llama3.2:1b', name: 'Llama 3.2 1B' },
+      { id: 'llama3.2:3b', name: 'Llama 3.2 3B' },
+      { id: 'codellama:7b', name: 'Code Llama 7B' },
+      { id: 'llava:7b', name: 'LLaVA 7B (vision)' },
+    ],
   },
   {
     id: 'openrouter',
     name: 'OpenRouter',
     description: 'Many models, pay-per-use',
-
+    needsApiKey: true,
     models: [
-      'qwen/qwen-2.5-coder-32b-instruct',
-      'anthropic/claude-3.5-sonnet',
-      'openai/gpt-4o',
-      'openai/gpt-4o-mini',
-      'google/gemini-pro-1.5',
-      'meta-llama/llama-3.1-70b-instruct',
+      { id: 'qwen/qwen-2.5-coder-32b-instruct', name: 'Qwen Coder 32B', recommended: true },
+      { id: 'anthropic/claude-3.5-sonnet', name: 'Claude 3.5 Sonnet' },
+      { id: 'openai/gpt-4o', name: 'GPT-4o' },
+      { id: 'openai/gpt-4o-mini', name: 'GPT-4o Mini' },
+      { id: 'google/gemini-pro-1.5', name: 'Gemini Pro 1.5' },
     ],
   },
   {
     id: 'anthropic',
     name: 'Anthropic',
-    description: 'Claude
-
-    models: [
+    description: 'Claude - Best reasoning',
+    needsApiKey: true,
+    models: [
+      { id: 'claude-3-5-sonnet-20241022', name: 'Claude 3.5 Sonnet', recommended: true },
+      { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus' },
+      { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku' },
+    ],
   },
   {
     id: 'openai',
     name: 'OpenAI',
     description: 'GPT models',
-
-    models: [
+    needsApiKey: true,
+    models: [
+      { id: 'gpt-4o', name: 'GPT-4o', recommended: true },
+      { id: 'gpt-4o-mini', name: 'GPT-4o Mini' },
+      { id: 'gpt-4-turbo', name: 'GPT-4 Turbo' },
+    ],
   },
 ];
 
-type
+type Step = 'provider' | 'apiKey' | 'model' | 'ollamaError' | 'done';
 
 export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
   const config = getConfig();
-  const [
+  const [step, setStep] = useState<Step>('provider');
   const [providerIndex, setProviderIndex] = useState(() => {
     const idx = PROVIDERS.findIndex(p => p.id === config.provider);
     return idx >= 0 ? idx : 0;
   });
   const [modelIndex, setModelIndex] = useState(0);
-  const [
+  const [apiKeyInput, setApiKeyInput] = useState('');
+  const [selectedProvider, setSelectedProvider] = useState<ProviderConfig | null>(null);
+
+  // Ollama status
+  const [ollamaStatus, setOllamaStatus] = useState<OllamaStatus | null>(null);
+  const [checkingOllama, setCheckingOllama] = useState(false);
+
+  // Check Ollama status when selecting Ollama provider
+  useEffect(() => {
+    if (step === 'model' && selectedProvider?.id === 'ollama' && !ollamaStatus) {
+      setCheckingOllama(true);
+      checkOllamaStatus().then(status => {
+        setOllamaStatus(status);
+        setCheckingOllama(false);
+
+        // If Ollama isn't running, show error
+        if (!status.running) {
+          setStep('ollamaError');
+        }
+      });
+    }
+  }, [step, selectedProvider, ollamaStatus]);
 
   useInput((input, key) => {
     if (key.escape) {
@@ -71,7 +124,7 @@ export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
       return;
     }
 
-    if (
+    if (step === 'provider') {
       if (key.upArrow) {
         setProviderIndex(prev => (prev > 0 ? prev - 1 : PROVIDERS.length - 1));
       } else if (key.downArrow) {
@@ -79,98 +132,247 @@ export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
       } else if (key.return) {
         const provider = PROVIDERS[providerIndex]!;
         setSelectedProvider(provider);
-
-
-
-
+
+        // Find recommended or current model
+        const currentIdx = provider.models.findIndex(m => m.id === config.model);
+        const recommendedIdx = provider.models.findIndex(m => m.recommended);
+        setModelIndex(currentIdx >= 0 ? currentIdx : (recommendedIdx >= 0 ? recommendedIdx : 0));
+
+        // Check if we need API key
+        if (provider.needsApiKey) {
+          const apiKeyProvider = provider.id as 'openrouter' | 'anthropic' | 'openai';
+          if (!config.apiKeys[apiKeyProvider]) {
+            setStep('apiKey');
+          } else {
+            setStep('model');
+          }
+        } else {
+          setStep('model');
+        }
       }
-    } else if (
+    } else if (step === 'model' && selectedProvider) {
       if (key.upArrow) {
         setModelIndex(prev => (prev > 0 ? prev - 1 : selectedProvider.models.length - 1));
       } else if (key.downArrow) {
        setModelIndex(prev => (prev < selectedProvider.models.length - 1 ? prev + 1 : 0));
       } else if (key.return) {
         const model = selectedProvider.models[modelIndex]!;
-
+
+        // For Ollama, warn if model not available
+        if (selectedProvider.id === 'ollama' && ollamaStatus && !hasModel(ollamaStatus, model.id)) {
+          // Still allow selection, but they'll need to pull it
+        }
+
         setProvider(selectedProvider.id);
-        setModel(model);
-
-
+        setModel(model.id);
+        setStep('done');
+        onSelect(selectedProvider.id, model.id);
+
+        // Brief delay to show confirmation
+        setTimeout(() => onClose(), 1500);
       } else if (key.leftArrow || input === 'b') {
-
+        setStep('provider');
+        setOllamaStatus(null); // Reset Ollama status
+      }
+    } else if (step === 'ollamaError') {
+      if (key.return || input === 'b') {
+        setStep('provider');
+        setOllamaStatus(null);
       }
     }
   });
 
-
+  const handleApiKeySubmit = (value: string) => {
+    if (value.trim() && selectedProvider) {
+      setApiKey(selectedProvider.id as 'openrouter' | 'anthropic' | 'openai', value.trim());
+      setStep('model');
+    }
+  };
+
+  // Provider selection
+  if (step === 'provider') {
     return (
-      <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1}>
+      <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
        <Box marginBottom={1}>
          <Text bold color="cyan">Select Provider</Text>
        </Box>
        <Box marginBottom={1}>
-          <Text color="gray">
+          <Text color="gray" dimColor>Arrows to navigate, Enter to select</Text>
        </Box>
 
        {PROVIDERS.map((provider, index) => {
          const isSelected = index === providerIndex;
          const isCurrent = provider.id === config.provider;
+          const hasKey = provider.needsApiKey && provider.id !== 'ollama'
+            ? !!config.apiKeys[provider.id as 'openrouter' | 'anthropic' | 'openai']
+            : true;
 
          return (
-            <Box key={provider.id}
+            <Box key={provider.id} flexDirection="column">
              <Text color={isSelected ? 'cyan' : 'white'}>
                {isSelected ? '❯ ' : '  '}
                {provider.name}
                {isCurrent && <Text color="green"> (current)</Text>}
+                {provider.needsApiKey && !hasKey && <Text color="red"> (needs key)</Text>}
+                {provider.needsApiKey && hasKey && !isCurrent && <Text color="yellow"> (key saved)</Text>}
              </Text>
              {isSelected && (
-                <Text color="gray">
+                <Text color="gray">  {provider.description}</Text>
              )}
            </Box>
          );
        })}
 
-        <Box marginTop={1}
-          <Text color="gray">
-            Current: {config.provider} / {config.model}
-          </Text>
+        <Box marginTop={1}>
+          <Text color="gray" dimColor>Press Esc to cancel</Text>
        </Box>
      </Box>
    );
  }
 
-  //
-
-
-  <Box
-  <
+  // API Key input
+  if (step === 'apiKey' && selectedProvider) {
+    return (
+      <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
+        <Box marginBottom={1}>
+          <Text bold color="cyan">Enter API Key</Text>
+        </Box>
+        <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+        <Box marginTop={1} flexDirection="column">
+          <Text color="gray" dimColor>
+            {selectedProvider.id === 'openrouter' && 'Get key: openrouter.ai/keys'}
+            {selectedProvider.id === 'anthropic' && 'Get key: console.anthropic.com'}
+            {selectedProvider.id === 'openai' && 'Get key: platform.openai.com/api-keys'}
+          </Text>
+          <Box marginTop={1}>
+            <Text color="cyan">❯ </Text>
+            <TextInput
+              value={apiKeyInput}
+              onChange={setApiKeyInput}
+              onSubmit={handleApiKeySubmit}
+              mask="*"
+            />
+          </Box>
+        </Box>
+        <Box marginTop={1}>
+          <Text color="gray" dimColor>Press Esc to cancel</Text>
+        </Box>
      </Box>
-
-
+    );
+  }
+
+  // Ollama error
+  if (step === 'ollamaError' && ollamaStatus) {
+    return (
+      <Box flexDirection="column" borderStyle="round" borderColor="red" padding={1} width={60}>
+        <Box marginBottom={1}>
+          <Text bold color="red">Ollama Not Available</Text>
+        </Box>
+        <Text color="red">{ollamaStatus.error}</Text>
+        <Box marginTop={1} flexDirection="column">
+          {!ollamaStatus.installed && (
+            <>
+              <Text>1. Install Ollama from https://ollama.ai</Text>
+              <Text>2. Run: ollama pull qwen2.5:0.5b</Text>
+              <Text>3. Try again</Text>
+            </>
+          )}
+          {ollamaStatus.installed && !ollamaStatus.running && (
+            <>
+              <Text>1. Start Ollama: ollama serve</Text>
+              <Text>2. Or run any model: ollama run qwen2.5:0.5b</Text>
+              <Text>3. Try again</Text>
+            </>
+          )}
+        </Box>
+        <Box marginTop={1}>
+          <Text color="gray" dimColor>Press Enter or B to go back</Text>
+        </Box>
      </Box>
+    );
+  }
+
+  // Model selection
+  if (step === 'model' && selectedProvider) {
+    const isOllama = selectedProvider.id === 'ollama';
+
+    return (
+      <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
+        <Box marginBottom={1}>
+          <Text bold color="cyan">Select Model</Text>
+        </Box>
+        <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+
+        {isOllama && checkingOllama && (
+          <Box marginY={1}>
+            <Text color="cyan"><Spinner type="dots" /></Text>
+            <Text> Checking Ollama status...</Text>
+          </Box>
+        )}
+
+        {isOllama && ollamaStatus && ollamaStatus.running && (
+          <Text color="green">✓ Ollama running ({ollamaStatus.models.length} models installed)</Text>
+        )}
+
+        <Box marginTop={1} marginBottom={1}>
+          <Text color="gray" dimColor>Arrows to navigate, Enter to select, B to go back</Text>
+        </Box>
+
+        {selectedProvider.models.map((model, index) => {
+          const isSelected = index === modelIndex;
+          const isCurrent = model.id === config.model && selectedProvider.id === config.provider;
 
-
-
-
-
+          // Check if Ollama model is available
+          let modelStatus = '';
+          if (isOllama && ollamaStatus) {
+            const available = hasModel(ollamaStatus, model.id);
+            modelStatus = available ? ' (installed)' : ' (not installed)';
+          }
 
-
-
-          <Text color={isSelected ? 'cyan' : 'white'}>
+          return (
+            <Text key={model.id} color={isSelected ? 'cyan' : 'white'}>
              {isSelected ? '❯ ' : '  '}
-              {model}
+              {model.name}
+              {model.recommended && <Text color="yellow"> *</Text>}
              {isCurrent && <Text color="green"> (current)</Text>}
-              {
+              {isOllama && ollamaStatus && (
+                hasModel(ollamaStatus, model.id)
+                  ? <Text color="green">{modelStatus}</Text>
+                  : <Text color="red">{modelStatus}</Text>
+              )}
            </Text>
+          );
+        })}
+
+        {isOllama && (
+          <Box marginTop={1} flexDirection="column">
+            <Text color="gray" dimColor>* = Recommended</Text>
+            {ollamaStatus && !hasModel(ollamaStatus, selectedProvider.models[modelIndex]?.id || '') && (
+              <Text color="yellow">Run: ollama pull {selectedProvider.models[modelIndex]?.id}</Text>
+            )}
          </Box>
-        )
-
+        )}
+
+        <Box marginTop={1}>
+          <Text color="gray" dimColor>Press Esc to cancel</Text>
+        </Box>
+      </Box>
+    );
+  }
 
-
-
-
-
+  // Done
+  if (step === 'done' && selectedProvider) {
+    return (
+      <Box flexDirection="column" borderStyle="round" borderColor="green" padding={1} width={60}>
+        <Text color="green" bold>Configuration Updated!</Text>
+        <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+        <Text><Text color="green">✓</Text> Model: {selectedProvider.models[modelIndex]?.name}</Text>
+        {selectedProvider.id === 'ollama' && ollamaStatus && !hasModel(ollamaStatus, selectedProvider.models[modelIndex]?.id || '') && (
+          <Text color="yellow">Remember to run: ollama pull {selectedProvider.models[modelIndex]?.id}</Text>
+        )}
      </Box>
-
-
+    );
+  }
+
+  return null;
 }
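For readers embedding this component in their own Ink app: the only contract it exposes is ProviderSelectorProps above (onClose, onSelect). A hypothetical host component, just to illustrate how those callbacks are consumed; the App-side state names here are invented and are not part of the package.

// Hypothetical host: only the onClose/onSelect props come from the diff above.
import React, { useState } from 'react';
import { render, Text } from 'ink';
// Path as it would appear from src/index.tsx inside the package source.
import { ProviderSelector } from './components/ProviderSelector.js';

function ProviderDemo() {
  const [open, setOpen] = useState(true);
  const [choice, setChoice] = useState('');

  if (open) {
    return (
      <ProviderSelector
        // Called by the component when selection finishes (after its short
        // confirmation delay) or when the user cancels.
        onClose={() => setOpen(false)}
        onSelect={(provider, model) => setChoice(`${provider} / ${model}`)}
      />
    );
  }
  return <Text>Active: {choice || 'unchanged'}</Text>;
}

render(<ProviderDemo />);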
package/src/index.tsx
CHANGED
@@ -6,85 +6,94 @@ import { setApiKey, setProvider, setModel, getConfig } from './lib/config.js';
 
 const args = process.argv.slice(2);
 
-
-
-
-
-
-
-
-
-
-if (!provider || !key) {
-  console.log('Usage: cnapse auth <provider> <api-key>');
-  console.log('Providers: openrouter, anthropic, openai');
-  process.exit(1);
-}
+async function main() {
+  // Handle CLI commands
+  if (args.length > 0) {
+    const command = args[0];
+
+    switch (command) {
+      case 'auth': {
+        const provider = args[1] as 'openrouter' | 'anthropic' | 'openai';
+        const key = args[2];
 
-
-
-
-
-
+        if (!provider || !key) {
+          console.log('Usage: cnapse auth <provider> <api-key>');
+          console.log('Providers: openrouter, anthropic, openai');
+          process.exit(1);
+        }
 
-
-
-
-
+        if (!['openrouter', 'anthropic', 'openai'].includes(provider)) {
+          console.log(`Invalid provider: ${provider}`);
+          console.log('Valid providers: openrouter, anthropic, openai');
+          process.exit(1);
+        }
 
-
-
+        setApiKey(provider, key);
+        console.log(`✓ ${provider} API key saved`);
+        process.exit(0);
+      }
 
-
-const
-
+      case 'config': {
+        const subcommand = args[1];
+
+        // Interactive config TUI if no subcommand
+        if (!subcommand) {
+          const { ConfigUI } = await import('./components/ConfigUI.js');
+          render(<ConfigUI />);
+          return; // Don't render App
+        }
 
-if (
-
-
-
+        if (subcommand === 'set') {
+          const key = args[2];
+          const value = args[3];
+
+          if (key === 'provider') {
+            if (!['openrouter', 'ollama', 'anthropic', 'openai'].includes(value!)) {
+              console.log('Valid providers: openrouter, ollama, anthropic, openai');
+              process.exit(1);
+            }
+            setProvider(value as any);
+            console.log(`✓ Provider set to: ${value}`);
+          } else if (key === 'model') {
+            setModel(value!);
+            console.log(`✓ Model set to: ${value}`);
+          } else {
+            console.log('Usage: cnapse config set <provider|model> <value>');
          }
-
-  console.log(`✓ Provider set to: ${value}`);
-} else if (key === 'model') {
-  setModel(value!);
-  console.log(`✓ Model set to: ${value}`);
-} else {
-  console.log('Usage: cnapse config set <provider|model> <value>');
+          process.exit(0);
        }
-process.exit(0);
-}
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if (subcommand === 'show') {
+          const config = getConfig();
+          console.log('\nC-napse Configuration:');
+          console.log(`  Provider: ${config.provider}`);
+          console.log(`  Model: ${config.model}`);
+          console.log(`  Ollama Host: ${config.ollamaHost}`);
+          console.log(`  API Keys configured:`);
+          console.log(`    - OpenRouter: ${config.apiKeys.openrouter ? '✓' : '✗'}`);
+          console.log(`    - Anthropic: ${config.apiKeys.anthropic ? '✓' : '✗'}`);
+          console.log(`    - OpenAI: ${config.apiKeys.openai ? '✓' : '✗'}`);
+          console.log('');
+          process.exit(0);
+        }
 
-
-
-
+        console.log('Usage: cnapse config [show|set <key> <value>]');
+        process.exit(1);
+      }
 
-
-
-
-
+      case 'help':
+      case '--help':
+      case '-h': {
+        console.log(`
 C-napse - Autonomous PC Intelligence
 
 Usage:
   cnapse                        Start interactive chat
   cnapse init                   Interactive setup wizard
-  cnapse
-  cnapse config
+  cnapse config                 Interactive configuration
+  cnapse config show            Show current configuration
   cnapse config set <k> <v>     Set config value
+  cnapse auth <provider> <key>  Set API key
   cnapse help                   Show this help
 
 Providers:
@@ -95,36 +104,39 @@ Providers:
 
 Quick Start:
   cnapse init    # Interactive setup
+  cnapse config  # Change provider/model
 
 Manual Setup:
   cnapse auth openrouter sk-or-xxxxx
   cnapse config set provider openrouter
   cnapse config set model qwen/qwen-2.5-coder-32b-instruct
 `);
-
-
+        process.exit(0);
+      }
 
-
-
-
-
-
-
+      case 'version':
+      case '--version':
+      case '-v': {
+        console.log('cnapse v0.5.0');
+        process.exit(0);
+      }
 
-
-
-
-
-
-
+      case 'init': {
+        // Interactive setup with Ink UI
+        const { Setup } = await import('./components/Setup.js');
+        render(<Setup />);
+        return; // Don't render App
+      }
 
-
-
-
-
+      default: {
+        // Unknown command - start app anyway
+        break;
+      }
    }
  }
+
+  // Start interactive TUI
+  render(<App />);
 }
 
-
-render(<App />);
+main();