scai 0.1.69 → 0.1.70
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/modelSetup.js +44 -12
- package/package.json +1 -1
package/dist/modelSetup.js
CHANGED
@@ -6,9 +6,11 @@ import chalk from 'chalk';
 import { getDbForRepo } from './db/client.js';
 import { readConfig, writeConfig } from './config.js';
 import { CONFIG_PATH } from './constants.js';
+import { platform } from 'os';
 // Constants
 const MODEL_PORT = 11434;
 const REQUIRED_MODELS = ['llama3', 'mistral'];
+const OLLAMA_URL = 'https://ollama.com/download';
 const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';
 // 🧠 Auto init config/db if missing
 export async function autoInitIfNeeded() {
@@ -26,6 +28,16 @@ export async function autoInitIfNeeded() {
         }
     }
 }
+// 🗨 Prompt user
+function promptUser(question) {
+    if (isYesMode)
+        return Promise.resolve('y');
+    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+    return new Promise((resolve) => rl.question(question, (a) => {
+        rl.close();
+        resolve(a.trim());
+    }));
+}
 // 🚀 Ensure Ollama server is running
 async function ensureOllamaRunning() {
     try {
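A note on the prompt helper added in the hunk above (it moves up from later in the file; the matching removal appears in the last hunk): `isYesMode` keys off the `--yes` flag or `SCAI_YES=1`, so every confirmation in this release resolves to 'y' without reading stdin in non-interactive runs. A minimal, self-contained sketch of that pattern follows; it mirrors the helper shown in the diff but is not the package code, and the `confirmAction` wrapper is hypothetical.

// Illustrative sketch, not part of the published package.
import readline from 'readline';

const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';

function promptUser(question) {
    // Non-interactive runs (--yes or SCAI_YES=1) auto-confirm without touching stdin.
    if (isYesMode)
        return Promise.resolve('y');
    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
    return new Promise((resolve) => rl.question(question, (answer) => {
        rl.close();
        resolve(answer.trim());
    }));
}

// Hypothetical usage: gate an action on a y/N answer.
async function confirmAction(message) {
    const answer = await promptUser(`${message} (y/N): `);
    return answer.toLowerCase() === 'y';
}

confirmAction('Download missing models?').then((ok) => console.log(ok ? 'confirmed' : 'skipped'));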
@@ -35,7 +47,7 @@ async function ensureOllamaRunning() {
             return;
         }
     }
-    catch
+    catch {
         console.log(chalk.yellow('⚙️ Challenge:') + ' Ollama is not running. Attempting to start it...');
     }
     console.log(chalk.yellow('⚙️ Challenge:') + ` Ollama does not appear to be running on port ${MODEL_PORT}.\n` +
@@ -46,15 +58,45 @@ async function ensureOllamaRunning() {
             stdio: 'ignore',
             windowsHide: true,
         });
+        child.on('error', async (err) => {
+            if (err.code === 'ENOENT') {
+                console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
+                console.log(chalk.yellow(`📦 Ollama is required to run local AI models.\n`));
+                console.log(chalk.yellow(`🔗 Download it here: ${OLLAMA_URL}`));
+                const answer = await promptUser('🌐 Would you like to open the download page in your browser? (y/N): ');
+                if (answer.toLowerCase() === 'y') {
+                    openBrowser(OLLAMA_URL);
+                }
+                process.exit(1);
+            }
+            else {
+                console.error(chalk.red('❌ Failed to start Ollama process:'), err);
+                process.exit(1);
+            }
+        });
         child.unref();
         await new Promise((res) => setTimeout(res, 3000));
         console.log(chalk.green('✅ Result:') + ' Ollama started successfully.');
     }
     catch (err) {
-        console.error(chalk.red('❌
+        console.error(chalk.red('❌ Unexpected error while trying to start Ollama:'), err);
         process.exit(1);
     }
 }
+// 🧭 Cross-platform browser opener
+function openBrowser(url) {
+    const command = platform() === 'win32'
+        ? `start ${url}`
+        : platform() === 'darwin'
+            ? `open ${url}`
+            : `xdg-open ${url}`;
+    try {
+        execSync(command, { stdio: 'ignore' });
+    }
+    catch (err) {
+        console.error(chalk.red('❌ Could not open browser. Please visit:'), url);
+    }
+}
 // 🧰 List installed models
 async function getInstalledModels() {
     try {
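The substantive change in the hunk above: when the Ollama binary is missing, Node reports it on the spawned child's 'error' event (asynchronously, with `err.code === 'ENOENT'`) rather than throwing from the spawn call itself, which is why the handler is attached before `unref()`; the new `openBrowser` helper then shells out to `start`, `open`, or `xdg-open` depending on the platform. Below is a standalone sketch of that detection pattern, assuming an `ollama serve` command and a `detached` option (assumptions, since the spawn call itself sits outside this hunk's context lines).

// Illustrative sketch, not part of the published package.
import { spawn } from 'child_process';

const child = spawn('ollama', ['serve'], {
    detached: true,      // assumed; only stdio/windowsHide appear in the hunk's context lines
    stdio: 'ignore',
    windowsHide: true,
});
child.on('error', (err) => {
    if (err.code === 'ENOENT') {
        // The executable was not found on PATH; spawn() did not throw synchronously.
        console.error('Ollama is not installed or not in PATH.');
    }
    else {
        console.error('Failed to start Ollama process:', err);
    }
    process.exit(1);
});
child.unref();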
@@ -69,16 +111,6 @@ async function getInstalledModels() {
         return [];
     }
 }
-// 💬 Prompt user
-function promptUser(question) {
-    if (isYesMode)
-        return Promise.resolve('y');
-    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
-    return new Promise((resolve) => rl.question(question, (a) => {
-        rl.close();
-        resolve(a.trim());
-    }));
-}
 // 📥 Download missing models
 async function ensureModelsDownloaded() {
     const installed = await getInstalledModels();