scai 0.1.69 → 0.1.71

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/modelSetup.js +71 -27
  2. package/package.json +1 -1
@@ -3,13 +3,17 @@ import * as readline from 'readline';
3
3
  import * as fs from 'fs';
4
4
  import * as path from 'path';
5
5
  import chalk from 'chalk';
6
+ import { platform } from 'os';
6
7
  import { getDbForRepo } from './db/client.js';
7
8
  import { readConfig, writeConfig } from './config.js';
8
9
  import { CONFIG_PATH } from './constants.js';
9
10
  // Constants
10
11
  const MODEL_PORT = 11434;
11
12
  const REQUIRED_MODELS = ['llama3', 'mistral'];
13
+ const OLLAMA_URL = 'https://ollama.com/download';
12
14
  const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';
15
+ let ollamaChecked = false;
16
+ let ollamaAvailable = false;
13
17
  // 🧠 Auto init config/db if missing
14
18
  export async function autoInitIfNeeded() {
15
19
  const cfg = readConfig();
@@ -26,32 +30,81 @@ export async function autoInitIfNeeded() {
26
30
  }
27
31
  }
28
32
  }
29
- // 🚀 Ensure Ollama server is running
30
- async function ensureOllamaRunning() {
33
+ // 🗨 Prompt user
34
+ function promptUser(question) {
35
+ if (isYesMode)
36
+ return Promise.resolve('y');
37
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
38
+ return new Promise((resolve) => rl.question(question, (a) => {
39
+ rl.close();
40
+ resolve(a.trim());
41
+ }));
42
+ }
43
+ // 🧭 Cross-platform browser opener
44
+ function openBrowser(url) {
45
+ const command = platform() === 'win32'
46
+ ? `start ${url}`
47
+ : platform() === 'darwin'
48
+ ? `open ${url}`
49
+ : `xdg-open ${url}`;
50
+ try {
51
+ execSync(command, { stdio: 'ignore' });
52
+ }
53
+ catch {
54
+ console.log(chalk.yellow('🔗 Please manually open:'), url);
55
+ }
56
+ }
57
+ // 🌐 Check if Ollama is running
58
+ async function isOllamaRunning() {
31
59
  try {
32
60
  const res = await fetch(`http://localhost:${MODEL_PORT}`);
33
- if (res.ok) {
34
- console.log(chalk.green('✅ Result:') + ` Ollama is already running on port ${MODEL_PORT}.`);
35
- return;
36
- }
61
+ return res.ok;
37
62
  }
38
- catch (err) {
39
- console.log(chalk.yellow('⚙️ Challenge:') + ' Ollama is not running. Attempting to start it...');
63
+ catch {
64
+ return false;
65
+ }
66
+ }
67
+ // 🚀 Ensure Ollama server is running
68
+ async function ensureOllamaRunning() {
69
+ if (ollamaChecked)
70
+ return;
71
+ ollamaChecked = true;
72
+ if (await isOllamaRunning()) {
73
+ console.log(chalk.green('✅ Ollama is already running.'));
74
+ ollamaAvailable = true;
75
+ return;
40
76
  }
41
- console.log(chalk.yellow('⚙️ Challenge:') + ` Ollama does not appear to be running on port ${MODEL_PORT}.\n` +
42
- chalk.yellow('🚀 Action:') + ' Attempting to start Ollama in the background...');
77
+ console.log(chalk.yellow('⚙️ Ollama is not running. Attempting to start it...'));
43
78
  try {
44
79
  const child = spawn('ollama', ['serve'], {
45
80
  detached: true,
46
81
  stdio: 'ignore',
47
82
  windowsHide: true,
48
83
  });
84
+ child.on('error', async (err) => {
85
+ if (err.code === 'ENOENT') {
86
+ console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
87
+ console.log(chalk.yellow(`📦 Ollama is required to run local AI models.`));
88
+ const answer = await promptUser('🌐 Would you like to open the download page in your browser? (y/N): ');
89
+ if (answer.toLowerCase() === 'y') {
90
+ openBrowser(OLLAMA_URL);
91
+ }
92
+ process.exit(1);
93
+ }
94
+ });
49
95
  child.unref();
50
96
  await new Promise((res) => setTimeout(res, 3000));
51
- console.log(chalk.green('✅ Result:') + ' Ollama started successfully.');
97
+ if (await isOllamaRunning()) {
98
+ console.log(chalk.green('✅ Ollama started successfully.'));
99
+ ollamaAvailable = true;
100
+ }
101
+ else {
102
+ console.log(chalk.red('❌ Ollama did not start within timeout.'));
103
+ process.exit(1);
104
+ }
52
105
  }
53
- catch (err) {
54
- console.error(chalk.red('❌ Failed:') + ' Could not start Ollama process.', err);
106
+ catch {
107
+ console.log(chalk.red('❌ Unexpected error starting Ollama.'));
55
108
  process.exit(1);
56
109
  }
57
110
  }
@@ -64,23 +117,14 @@ async function getInstalledModels() {
64
117
  .map((line) => line.split(/\s+/)[0].split(':')[0])
65
118
  .filter((model) => REQUIRED_MODELS.includes(model));
66
119
  }
67
- catch (err) {
68
- console.error(chalk.red('❌ Could not fetch installed models:'), err);
120
+ catch {
69
121
  return [];
70
122
  }
71
123
  }
72
- // 💬 Prompt user
73
- function promptUser(question) {
74
- if (isYesMode)
75
- return Promise.resolve('y');
76
- const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
77
- return new Promise((resolve) => rl.question(question, (a) => {
78
- rl.close();
79
- resolve(a.trim());
80
- }));
81
- }
82
124
  // 📥 Download missing models
83
125
  async function ensureModelsDownloaded() {
126
+ if (!ollamaAvailable)
127
+ return;
84
128
  const installed = await getInstalledModels();
85
129
  const missing = REQUIRED_MODELS.filter((m) => !installed.includes(m));
86
130
  if (!missing.length) {
@@ -99,8 +143,8 @@ async function ensureModelsDownloaded() {
99
143
  execSync(`ollama pull ${model}`, { stdio: 'inherit' });
100
144
  console.log(chalk.green(`✅ Pulled ${model}`));
101
145
  }
102
- catch (err) {
103
- console.error(chalk.red(`❌ Failed to pull ${model}:`), err);
146
+ catch {
147
+ console.log(chalk.red(`❌ Failed to pull ${model}.`));
104
148
  process.exit(1);
105
149
  }
106
150
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "scai",
3
- "version": "0.1.69",
3
+ "version": "0.1.71",
4
4
  "type": "module",
5
5
  "bin": {
6
6
  "scai": "./dist/index.js"