@projectservan8n/cnapse 0.5.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,69 +1,122 @@
- import React, { useState } from 'react';
+ /**
+  * Provider Selector - Used in TUI for /provider command
+  * - Shows provider list with API key status
+  * - Prompts for API key if needed
+  * - Shows model list with recommendations
+  * - For Ollama: shows model availability status
+  */
+
+ import React, { useState, useEffect } from 'react';
  import { Box, Text, useInput } from 'ink';
- import { getConfig, setProvider, setModel } from '../lib/config.js';
+ import TextInput from 'ink-text-input';
+ import Spinner from 'ink-spinner';
+ import { getConfig, setProvider, setModel, setApiKey } from '../lib/config.js';
+ import { checkOllamaStatus, hasModel, OllamaStatus } from '../lib/ollama.js';

  interface ProviderSelectorProps {
    onClose: () => void;
    onSelect: (provider: string, model: string) => void;
  }

- interface ProviderOption {
+ interface ModelConfig {
+   id: string;
+   name: string;
+   recommended?: boolean;
+ }
+
+ interface ProviderConfig {
    id: 'ollama' | 'openrouter' | 'anthropic' | 'openai';
    name: string;
    description: string;
-   defaultModel: string;
-   models: string[];
+   needsApiKey: boolean;
+   models: ModelConfig[];
  }

- const PROVIDERS: ProviderOption[] = [
+ const PROVIDERS: ProviderConfig[] = [
    {
      id: 'ollama',
      name: 'Ollama',
-     description: 'Local AI - Free, private, no API key',
-     defaultModel: 'qwen2.5:0.5b',
-     models: ['qwen2.5:0.5b', 'qwen2.5:1.5b', 'qwen2.5:7b', 'llama3.2:1b', 'llama3.2:3b', 'mistral:7b', 'codellama:7b', 'llava:7b'],
+     description: 'Local AI - Free, private',
+     needsApiKey: false,
+     models: [
+       { id: 'qwen2.5:0.5b', name: 'Qwen 2.5 0.5B (fast)', recommended: true },
+       { id: 'qwen2.5:1.5b', name: 'Qwen 2.5 1.5B' },
+       { id: 'qwen2.5:7b', name: 'Qwen 2.5 7B (quality)' },
+       { id: 'llama3.2:1b', name: 'Llama 3.2 1B' },
+       { id: 'llama3.2:3b', name: 'Llama 3.2 3B' },
+       { id: 'codellama:7b', name: 'Code Llama 7B' },
+       { id: 'llava:7b', name: 'LLaVA 7B (vision)' },
+     ],
    },
    {
      id: 'openrouter',
      name: 'OpenRouter',
-     description: 'Many models, pay-per-use',
-     defaultModel: 'qwen/qwen-2.5-coder-32b-instruct',
+     description: 'Many models, budget-friendly',
+     needsApiKey: true,
      models: [
-       'qwen/qwen-2.5-coder-32b-instruct',
-       'anthropic/claude-3.5-sonnet',
-       'openai/gpt-4o',
-       'openai/gpt-4o-mini',
-       'google/gemini-pro-1.5',
-       'meta-llama/llama-3.1-70b-instruct',
+       { id: 'qwen/qwen-2.5-coder-32b-instruct', name: 'Qwen Coder 32B ($0.07/1M)', recommended: true },
+       { id: 'qwen/qwen-2-vl-7b-instruct', name: 'Qwen 2 VL (FREE vision)' },
+       { id: 'meta-llama/llama-3.3-70b-instruct', name: 'Llama 3.3 70B ($0.10/1M)' },
+       { id: 'google/gemini-2.0-flash-001', name: 'Gemini 2.0 Flash (free tier)' },
+       { id: 'deepseek/deepseek-chat', name: 'DeepSeek V3 ($0.14/1M)' },
      ],
    },
    {
      id: 'anthropic',
      name: 'Anthropic',
-     description: 'Claude models - Best for coding',
-     defaultModel: 'claude-3-5-sonnet-20241022',
-     models: ['claude-3-5-sonnet-20241022', 'claude-3-opus-20240229', 'claude-3-haiku-20240307'],
+     description: 'Claude - Best reasoning',
+     needsApiKey: true,
+     models: [
+       { id: 'claude-3-5-sonnet-20241022', name: 'Claude 3.5 Sonnet', recommended: true },
+       { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus' },
+       { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku' },
+     ],
    },
    {
      id: 'openai',
      name: 'OpenAI',
      description: 'GPT models',
-     defaultModel: 'gpt-4o',
-     models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-3.5-turbo'],
+     needsApiKey: true,
+     models: [
+       { id: 'gpt-4o', name: 'GPT-4o', recommended: true },
+       { id: 'gpt-4o-mini', name: 'GPT-4o Mini' },
+       { id: 'gpt-4-turbo', name: 'GPT-4 Turbo' },
+     ],
    },
  ];

- type SelectionMode = 'provider' | 'model';
+ type Step = 'provider' | 'apiKey' | 'model' | 'ollamaError' | 'done';

  export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
    const config = getConfig();
-   const [mode, setMode] = useState<SelectionMode>('provider');
+   const [step, setStep] = useState<Step>('provider');
    const [providerIndex, setProviderIndex] = useState(() => {
      const idx = PROVIDERS.findIndex(p => p.id === config.provider);
      return idx >= 0 ? idx : 0;
    });
    const [modelIndex, setModelIndex] = useState(0);
-   const [selectedProvider, setSelectedProvider] = useState<ProviderOption | null>(null);
+   const [apiKeyInput, setApiKeyInput] = useState('');
+   const [selectedProvider, setSelectedProvider] = useState<ProviderConfig | null>(null);
+
+   // Ollama status
+   const [ollamaStatus, setOllamaStatus] = useState<OllamaStatus | null>(null);
+   const [checkingOllama, setCheckingOllama] = useState(false);
+
+   // Check Ollama status when selecting Ollama provider
+   useEffect(() => {
+     if (step === 'model' && selectedProvider?.id === 'ollama' && !ollamaStatus) {
+       setCheckingOllama(true);
+       checkOllamaStatus().then(status => {
+         setOllamaStatus(status);
+         setCheckingOllama(false);
+
+         // If Ollama isn't running, show error
+         if (!status.running) {
+           setStep('ollamaError');
+         }
+       });
+     }
+   }, [step, selectedProvider, ollamaStatus]);

    useInput((input, key) => {
      if (key.escape) {
@@ -71,7 +124,7 @@ export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
        return;
      }

-     if (mode === 'provider') {
+     if (step === 'provider') {
        if (key.upArrow) {
          setProviderIndex(prev => (prev > 0 ? prev - 1 : PROVIDERS.length - 1));
        } else if (key.downArrow) {
@@ -79,98 +132,247 @@ export function ProviderSelector({ onClose, onSelect }: ProviderSelectorProps) {
        } else if (key.return) {
          const provider = PROVIDERS[providerIndex]!;
          setSelectedProvider(provider);
-         // Find current model index if it exists in the provider's models
-         const currentModelIdx = provider.models.findIndex(m => m === config.model);
-         setModelIndex(currentModelIdx >= 0 ? currentModelIdx : 0);
-         setMode('model');
+
+         // Find recommended or current model
+         const currentIdx = provider.models.findIndex(m => m.id === config.model);
+         const recommendedIdx = provider.models.findIndex(m => m.recommended);
+         setModelIndex(currentIdx >= 0 ? currentIdx : (recommendedIdx >= 0 ? recommendedIdx : 0));
+
+         // Check if we need API key
+         if (provider.needsApiKey) {
+           const apiKeyProvider = provider.id as 'openrouter' | 'anthropic' | 'openai';
+           if (!config.apiKeys[apiKeyProvider]) {
+             setStep('apiKey');
+           } else {
+             setStep('model');
+           }
+         } else {
+           setStep('model');
+         }
        }
-     } else if (mode === 'model' && selectedProvider) {
+     } else if (step === 'model' && selectedProvider) {
        if (key.upArrow) {
          setModelIndex(prev => (prev > 0 ? prev - 1 : selectedProvider.models.length - 1));
        } else if (key.downArrow) {
          setModelIndex(prev => (prev < selectedProvider.models.length - 1 ? prev + 1 : 0));
        } else if (key.return) {
          const model = selectedProvider.models[modelIndex]!;
-         // Save to config
+
+         // For Ollama, warn if model not available
+         if (selectedProvider.id === 'ollama' && ollamaStatus && !hasModel(ollamaStatus, model.id)) {
+           // Still allow selection, but they'll need to pull it
+         }
+
          setProvider(selectedProvider.id);
-         setModel(model);
-         onSelect(selectedProvider.id, model);
-         onClose();
+         setModel(model.id);
+         setStep('done');
+         onSelect(selectedProvider.id, model.id);
+
+         // Brief delay to show confirmation
+         setTimeout(() => onClose(), 1500);
        } else if (key.leftArrow || input === 'b') {
-         setMode('provider');
+         setStep('provider');
+         setOllamaStatus(null); // Reset Ollama status
+       }
+     } else if (step === 'ollamaError') {
+       if (key.return || input === 'b') {
+         setStep('provider');
+         setOllamaStatus(null);
        }
      }
    });

-   if (mode === 'provider') {
+   const handleApiKeySubmit = (value: string) => {
+     if (value.trim() && selectedProvider) {
+       setApiKey(selectedProvider.id as 'openrouter' | 'anthropic' | 'openai', value.trim());
+       setStep('model');
+     }
+   };
+
+   // Provider selection
+   if (step === 'provider') {
      return (
-       <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1}>
+       <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
          <Box marginBottom={1}>
            <Text bold color="cyan">Select Provider</Text>
          </Box>
          <Box marginBottom={1}>
-           <Text color="gray">Use arrows to navigate, Enter to select, Esc to cancel</Text>
+           <Text color="gray" dimColor>Arrows to navigate, Enter to select</Text>
          </Box>

          {PROVIDERS.map((provider, index) => {
            const isSelected = index === providerIndex;
            const isCurrent = provider.id === config.provider;
+           const hasKey = provider.needsApiKey && provider.id !== 'ollama'
+             ? !!config.apiKeys[provider.id as 'openrouter' | 'anthropic' | 'openai']
+             : true;

            return (
-             <Box key={provider.id} marginY={0}>
+             <Box key={provider.id} flexDirection="column">
                <Text color={isSelected ? 'cyan' : 'white'}>
                  {isSelected ? '❯ ' : ' '}
                  {provider.name}
                  {isCurrent && <Text color="green"> (current)</Text>}
+                 {provider.needsApiKey && !hasKey && <Text color="red"> (needs key)</Text>}
+                 {provider.needsApiKey && hasKey && !isCurrent && <Text color="yellow"> (key saved)</Text>}
                </Text>
                {isSelected && (
-                 <Text color="gray"> - {provider.description}</Text>
+                 <Text color="gray"> {provider.description}</Text>
                )}
              </Box>
            );
          })}

-         <Box marginTop={1} borderStyle="single" borderColor="gray" paddingX={1}>
-           <Text color="gray">
-             Current: {config.provider} / {config.model}
-           </Text>
+         <Box marginTop={1}>
+           <Text color="gray" dimColor>Press Esc to cancel</Text>
          </Box>
        </Box>
      );
    }

-   // Model selection mode
-   return (
-     <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1}>
-       <Box marginBottom={1}>
-         <Text bold color="cyan">Select Model for {selectedProvider?.name}</Text>
+   // API Key input
+   if (step === 'apiKey' && selectedProvider) {
+     return (
+       <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
+         <Box marginBottom={1}>
+           <Text bold color="cyan">Enter API Key</Text>
+         </Box>
+         <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+         <Box marginTop={1} flexDirection="column">
+           <Text color="gray" dimColor>
+             {selectedProvider.id === 'openrouter' && 'Get key: openrouter.ai/keys'}
+             {selectedProvider.id === 'anthropic' && 'Get key: console.anthropic.com'}
+             {selectedProvider.id === 'openai' && 'Get key: platform.openai.com/api-keys'}
+           </Text>
+           <Box marginTop={1}>
+             <Text color="cyan">❯ </Text>
+             <TextInput
+               value={apiKeyInput}
+               onChange={setApiKeyInput}
+               onSubmit={handleApiKeySubmit}
+               mask="*"
+             />
+           </Box>
+         </Box>
+         <Box marginTop={1}>
+           <Text color="gray" dimColor>Press Esc to cancel</Text>
+         </Box>
        </Box>
-       <Box marginBottom={1}>
-         <Text color="gray">Arrows to navigate, Enter to select, B/Left to go back</Text>
+     );
+   }
+
+   // Ollama error
+   if (step === 'ollamaError' && ollamaStatus) {
+     return (
+       <Box flexDirection="column" borderStyle="round" borderColor="red" padding={1} width={60}>
+         <Box marginBottom={1}>
+           <Text bold color="red">Ollama Not Available</Text>
+         </Box>
+         <Text color="red">{ollamaStatus.error}</Text>
+         <Box marginTop={1} flexDirection="column">
+           {!ollamaStatus.installed && (
+             <>
+               <Text>1. Install Ollama from https://ollama.ai</Text>
+               <Text>2. Run: ollama pull qwen2.5:0.5b</Text>
+               <Text>3. Try again</Text>
+             </>
+           )}
+           {ollamaStatus.installed && !ollamaStatus.running && (
+             <>
+               <Text>1. Start Ollama: ollama serve</Text>
+               <Text>2. Or run any model: ollama run qwen2.5:0.5b</Text>
+               <Text>3. Try again</Text>
+             </>
+           )}
+         </Box>
+         <Box marginTop={1}>
+           <Text color="gray" dimColor>Press Enter or B to go back</Text>
+         </Box>
        </Box>
+     );
+   }
+
+   // Model selection
+   if (step === 'model' && selectedProvider) {
+     const isOllama = selectedProvider.id === 'ollama';
+
+     return (
+       <Box flexDirection="column" borderStyle="round" borderColor="cyan" padding={1} width={60}>
+         <Box marginBottom={1}>
+           <Text bold color="cyan">Select Model</Text>
+         </Box>
+         <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+
+         {isOllama && checkingOllama && (
+           <Box marginY={1}>
+             <Text color="cyan"><Spinner type="dots" /></Text>
+             <Text> Checking Ollama status...</Text>
+           </Box>
+         )}
+
+         {isOllama && ollamaStatus && ollamaStatus.running && (
+           <Text color="green">✓ Ollama running ({ollamaStatus.models.length} models installed)</Text>
+         )}
+
+         <Box marginTop={1} marginBottom={1}>
+           <Text color="gray" dimColor>Arrows to navigate, Enter to select, B to go back</Text>
+         </Box>
+
+         {selectedProvider.models.map((model, index) => {
+           const isSelected = index === modelIndex;
+           const isCurrent = model.id === config.model && selectedProvider.id === config.provider;

-       {selectedProvider?.models.map((model, index) => {
-         const isSelected = index === modelIndex;
-         const isCurrent = model === config.model && selectedProvider.id === config.provider;
-         const isDefault = model === selectedProvider.defaultModel;
+           // Check if Ollama model is available
+           let modelStatus = '';
+           if (isOllama && ollamaStatus) {
+             const available = hasModel(ollamaStatus, model.id);
+             modelStatus = available ? ' (installed)' : ' (not installed)';
+           }

-         return (
-           <Box key={model} marginY={0}>
-             <Text color={isSelected ? 'cyan' : 'white'}>
+           return (
+             <Text key={model.id} color={isSelected ? 'cyan' : 'white'}>
                {isSelected ? '❯ ' : ' '}
-               {model}
+               {model.name}
+               {model.recommended && <Text color="yellow"> *</Text>}
                {isCurrent && <Text color="green"> (current)</Text>}
-               {isDefault && !isCurrent && <Text color="yellow"> (default)</Text>}
+               {isOllama && ollamaStatus && (
+                 hasModel(ollamaStatus, model.id)
+                   ? <Text color="green">{modelStatus}</Text>
+                   : <Text color="red">{modelStatus}</Text>
+               )}
              </Text>
+           );
+         })}
+
+         {isOllama && (
+           <Box marginTop={1} flexDirection="column">
+             <Text color="gray" dimColor>* = Recommended</Text>
+             {ollamaStatus && !hasModel(ollamaStatus, selectedProvider.models[modelIndex]?.id || '') && (
+               <Text color="yellow">Run: ollama pull {selectedProvider.models[modelIndex]?.id}</Text>
+             )}
            </Box>
-         );
-       })}
+         )}
+
+         <Box marginTop={1}>
+           <Text color="gray" dimColor>Press Esc to cancel</Text>
+         </Box>
+       </Box>
+     );
+   }

-     <Box marginTop={1} borderStyle="single" borderColor="gray" paddingX={1}>
-       <Text color="gray">
-         Provider: {selectedProvider?.name}
-       </Text>
+   // Done
+   if (step === 'done' && selectedProvider) {
+     return (
+       <Box flexDirection="column" borderStyle="round" borderColor="green" padding={1} width={60}>
+         <Text color="green" bold>Configuration Updated!</Text>
+         <Text><Text color="green">✓</Text> Provider: {selectedProvider.name}</Text>
+         <Text><Text color="green">✓</Text> Model: {selectedProvider.models[modelIndex]?.name}</Text>
+         {selectedProvider.id === 'ollama' && ollamaStatus && !hasModel(ollamaStatus, selectedProvider.models[modelIndex]?.id || '') && (
+           <Text color="yellow">Remember to run: ollama pull {selectedProvider.models[modelIndex]?.id}</Text>
+         )}
        </Box>
-   </Box>
-   );
+     );
+   }
+
+   return null;
  }
@@ -15,17 +15,16 @@ const PROVIDERS = [
  const DEFAULT_MODELS: Record<string, string[]> = {
    ollama: ['qwen2.5:0.5b', 'qwen2.5:7b', 'llama3.2:3b', 'codellama:7b'],
    openrouter: [
-     'qwen/qwen-2.5-coder-32b-instruct',
-     'anthropic/claude-3.5-sonnet',
-     'openai/gpt-4o',
-     'google/gemini-pro-1.5',
+     'qwen/qwen-2.5-coder-32b-instruct', // $0.07/1M - Best value!
+     'qwen/qwen-2-vl-7b-instruct', // FREE - Vision model
+     'meta-llama/llama-3.3-70b-instruct', // $0.10/1M
+     'google/gemini-2.0-flash-001', // Free tier
    ],
    anthropic: [
      'claude-3-5-sonnet-20241022',
-     'claude-3-opus-20240229',
      'claude-3-haiku-20240307',
    ],
-   openai: ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-3.5-turbo'],
+   openai: ['gpt-4o-mini', 'gpt-3.5-turbo'],
  };

  export function Setup() {
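
Note on the new imports: ProviderSelector now pulls checkOllamaStatus, hasModel, and OllamaStatus from ../lib/ollama.js (and setApiKey plus a config.apiKeys map from ../lib/config.js), none of which appear in this diff. Below is only a rough sketch of the shape those Ollama helpers would need for the component above to work; the field names (installed, running, models, error) are inferred from how the component reads the status object, not taken from the package's actual source.

  // ../lib/ollama.ts - hypothetical sketch inferred from usage in ProviderSelector
  export interface OllamaStatus {
    installed: boolean;   // Ollama binary/CLI detected on the system
    running: boolean;     // local server reachable (Ollama's default is http://localhost:11434)
    models: string[];     // locally pulled model tags, e.g. 'qwen2.5:0.5b'
    error?: string;       // human-readable reason shown when running is false
  }

  export async function checkOllamaStatus(): Promise<OllamaStatus> {
    try {
      // GET /api/tags lists the models installed on a running Ollama server
      const res = await fetch('http://localhost:11434/api/tags');
      const data = (await res.json()) as { models?: Array<{ name: string }> };
      return { installed: true, running: true, models: (data.models ?? []).map(m => m.name) };
    } catch {
      // A real implementation would also distinguish "installed but not running"
      // (e.g. by probing for the ollama binary) from "not installed at all".
      return { installed: false, running: false, models: [], error: 'Could not reach Ollama at localhost:11434' };
    }
  }

  export function hasModel(status: OllamaStatus, modelId: string): boolean {
    // Ollama reports tags like 'qwen2.5:0.5b'; treat an exact match (or ':latest' suffix) as installed
    return status.models.some(m => m === modelId || m === `${modelId}:latest`);
  }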