@projectservan8n/cnapse 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ConfigUI-V5TM6KKS.js +306 -0
- package/dist/index.js +392 -176
- package/package.json +1 -1
- package/src/components/ConfigUI.tsx +353 -0
- package/src/components/ProviderSelector.tsx +270 -68
- package/src/index.tsx +95 -83
- package/src/lib/ollama.ts +140 -0
package/src/lib/ollama.ts
@@ -0,0 +1,140 @@
+/**
+ * Ollama utilities - Check status, list models, pull/run models
+ */
+
+import { exec } from 'child_process';
+import { promisify } from 'util';
+
+const execAsync = promisify(exec);
+
+export interface OllamaModel {
+  name: string;
+  size: string;
+  modified: string;
+}
+
+export interface OllamaStatus {
+  installed: boolean;
+  running: boolean;
+  models: OllamaModel[];
+  error?: string;
+}
+
+/**
+ * Check if Ollama is installed and running, list available models
+ */
+export async function checkOllamaStatus(): Promise<OllamaStatus> {
+  try {
+    // Try to list models - this checks both installation and if it's running
+    const { stdout } = await execAsync('ollama list', { timeout: 10000 });
+
+    // Parse the output
+    const lines = stdout.trim().split('\n');
+    const models: OllamaModel[] = [];
+
+    // Skip header line
+    for (let i = 1; i < lines.length; i++) {
+      const line = lines[i];
+      if (!line?.trim()) continue;
+
+      // Parse: NAME  ID  SIZE  MODIFIED
+      const parts = line.split(/\s{2,}/);
+      if (parts.length >= 3) {
+        models.push({
+          name: parts[0]?.trim() || '',
+          size: parts[2]?.trim() || '',
+          modified: parts[3]?.trim() || '',
+        });
+      }
+    }
+
+    return {
+      installed: true,
+      running: true,
+      models,
+    };
+  } catch (err) {
+    const errorMsg = err instanceof Error ? err.message : 'Unknown error';
+
+    // Check if Ollama is installed but not running
+    if (errorMsg.includes('connect') || errorMsg.includes('refused')) {
+      return {
+        installed: true,
+        running: false,
+        models: [],
+        error: 'Ollama is not running. Start it with: ollama serve',
+      };
+    }
+
+    // Ollama not installed
+    if (errorMsg.includes('not found') || errorMsg.includes('not recognized')) {
+      return {
+        installed: false,
+        running: false,
+        models: [],
+        error: 'Ollama not installed. Get it at: https://ollama.ai',
+      };
+    }
+
+    return {
+      installed: false,
+      running: false,
+      models: [],
+      error: errorMsg,
+    };
+  }
+}
+
+/**
+ * Check if a specific model is available
+ */
+export function hasModel(status: OllamaStatus, modelId: string): boolean {
+  const modelName = modelId.split(':')[0]?.toLowerCase() || '';
+  return status.models.some(m => m.name.toLowerCase().startsWith(modelName));
+}
+
+/**
+ * Pull a model (download it)
+ */
+export async function pullModel(modelId: string, onProgress?: (msg: string) => void): Promise<boolean> {
+  try {
+    onProgress?.(`Downloading ${modelId}...`);
+
+    // Pull with a long timeout (models can be large)
+    await execAsync(`ollama pull ${modelId}`, { timeout: 600000 }); // 10 min
+
+    onProgress?.(`Downloaded ${modelId}`);
+    return true;
+  } catch (err) {
+    const errorMsg = err instanceof Error ? err.message : 'Unknown error';
+    onProgress?.(`Failed to download: ${errorMsg}`);
+    return false;
+  }
+}
+
+/**
+ * Run a model to load it into memory
+ */
+export async function runModel(modelId: string): Promise<boolean> {
+  try {
+    // Send a simple prompt to load the model
+    await execAsync(`ollama run ${modelId} "Hi" --nowordwrap`, { timeout: 120000 });
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+/**
+ * Get model size in human-readable format
+ */
+export function getModelInfo(status: OllamaStatus, modelId: string): { available: boolean; size?: string } {
+  const modelName = modelId.split(':')[0]?.toLowerCase() || '';
+  const model = status.models.find(m => m.name.toLowerCase().startsWith(modelName));
+
+  if (model) {
+    return { available: true, size: model.size };
+  }
+
+  return { available: false };
+}
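
For context, the new exports compose into a setup flow: check status, pull the model if it is missing, then warm it into memory. The sketch below shows one plausible wiring; the import path `./lib/ollama`, the helper name `ensureModelReady`, and the `llama3:8b` model id are illustrative assumptions, not taken from the package's own callers.

import { checkOllamaStatus, hasModel, pullModel, runModel } from './lib/ollama';

// Hypothetical model id; any tag from the Ollama library works the same way.
const MODEL_ID = 'llama3:8b';

async function ensureModelReady(): Promise<boolean> {
  const status = await checkOllamaStatus();

  // Surface install/startup problems to the user and bail out early.
  if (!status.installed || !status.running) {
    console.error(status.error);
    return false;
  }

  // Download the model only if `ollama list` did not already report it.
  if (!hasModel(status, MODEL_ID)) {
    const pulled = await pullModel(MODEL_ID, msg => console.log(msg));
    if (!pulled) return false;
  }

  // Warm the model into memory so the first real prompt responds quickly.
  return runModel(MODEL_ID);
}

ensureModelReady().then(ready => console.log(ready ? 'Model ready' : 'Setup failed'));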