scai 0.1.70 → 0.1.72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/modelSetup.js +77 -51
  2. package/package.json +1 -1
package/dist/modelSetup.js CHANGED
@@ -3,15 +3,17 @@ import * as readline from 'readline';
  import * as fs from 'fs';
  import * as path from 'path';
  import chalk from 'chalk';
+ import { platform } from 'os';
  import { getDbForRepo } from './db/client.js';
  import { readConfig, writeConfig } from './config.js';
  import { CONFIG_PATH } from './constants.js';
- import { platform } from 'os';
  // Constants
  const MODEL_PORT = 11434;
  const REQUIRED_MODELS = ['llama3', 'mistral'];
  const OLLAMA_URL = 'https://ollama.com/download';
  const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';
+ let ollamaChecked = false;
+ let ollamaAvailable = false;
  // 🧠 Auto init config/db if missing
  export async function autoInitIfNeeded() {
      const cfg = readConfig();
@@ -28,73 +30,96 @@ export async function autoInitIfNeeded() {
          }
      }
  }
- // 🗨 Prompt user
+ // 🗨 Prompt user with 10-second timeout
  function promptUser(question) {
      if (isYesMode)
          return Promise.resolve('y');
      const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
-     return new Promise((resolve) => rl.question(question, (a) => {
-         rl.close();
-         resolve(a.trim());
-     }));
+     return new Promise((resolve) => {
+         const timer = setTimeout(() => {
+             rl.close();
+             resolve('');
+         }, 10000); // 10 second timeout
+         rl.question(question, (answer) => {
+             clearTimeout(timer);
+             rl.close();
+             resolve(answer.trim());
+         });
+     });
  }
- // 🚀 Ensure Ollama server is running
- async function ensureOllamaRunning() {
+ // 🧭 Cross-platform browser opener
+ function openBrowser(url) {
+     const command = platform() === 'win32'
+         ? `start ${url}`
+         : platform() === 'darwin'
+             ? `open ${url}`
+             : `xdg-open ${url}`;
+     try {
+         execSync(command, { stdio: 'ignore' });
+     }
+     catch {
+         console.log(chalk.yellow('🔗 Please manually open:'), url);
+     }
+ }
+ // 🌐 Check if Ollama is running
+ async function isOllamaRunning() {
      try {
          const res = await fetch(`http://localhost:${MODEL_PORT}`);
-         if (res.ok) {
-             console.log(chalk.green('✅ Result:') + ` Ollama is already running on port ${MODEL_PORT}.`);
-             return;
-         }
+         return res.ok;
      }
      catch {
-         console.log(chalk.yellow('⚙️ Challenge:') + ' Ollama is not running. Attempting to start it...');
+         return false;
+     }
+ }
+ // 🚀 Ensure Ollama server is running
+ async function ensureOllamaRunning() {
+     if (ollamaChecked)
+         return;
+     ollamaChecked = true;
+     if (await isOllamaRunning()) {
+         console.log(chalk.green('✅ Ollama is already running.'));
+         ollamaAvailable = true;
+         return;
      }
-     console.log(chalk.yellow('⚙️ Challenge:') + ` Ollama does not appear to be running on port ${MODEL_PORT}.\n` +
-         chalk.yellow('🚀 Action:') + ' Attempting to start Ollama in the background...');
+     console.log(chalk.yellow('⚙️ Ollama is not running. Attempting to start it...'));
+     let ollamaStarted = false;
      try {
          const child = spawn('ollama', ['serve'], {
             detached: true,
             stdio: 'ignore',
             windowsHide: true,
         });
-         child.on('error', async (err) => {
-             if (err.code === 'ENOENT') {
-                 console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
-                 console.log(chalk.yellow(`📦 Ollama is required to run local AI models.\n`));
-                 console.log(chalk.yellow(`🔗 Download it here: ${OLLAMA_URL}`));
-                 const answer = await promptUser('🌐 Would you like to open the download page in your browser? (y/N): ');
-                 if (answer.toLowerCase() === 'y') {
-                     openBrowser(OLLAMA_URL);
-                 }
-                 process.exit(1);
-             }
-             else {
-                 console.error(chalk.red('❌ Failed to start Ollama process:'), err);
-                 process.exit(1);
-             }
-         });
          child.unref();
-         await new Promise((res) => setTimeout(res, 3000));
-         console.log(chalk.green('✅ Result:') + ' Ollama started successfully.');
+         await new Promise((res) => setTimeout(res, 10000));
+         if (await isOllamaRunning()) {
+             console.log(chalk.green('✅ Ollama started successfully.'));
+             ollamaAvailable = true;
+             return;
+         }
      }
      catch (err) {
-         console.error(chalk.red('❌ Unexpected error while trying to start Ollama:'), err);
-         process.exit(1);
+         if (err.code !== 'ENOENT') {
+             console.log(chalk.red('❌ Unexpected error starting Ollama.'));
+             process.exit(1);
+         }
      }
- }
- // 🧭 Cross-platform browser opener
- function openBrowser(url) {
-     const command = platform() === 'win32'
-         ? `start ${url}`
-         : platform() === 'darwin'
-             ? `open ${url}`
-             : `xdg-open ${url}`;
-     try {
-         execSync(command, { stdio: 'ignore' });
+     // If we get here, Ollama likely isn't installed
+     console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
+     console.log(chalk.yellow(`📦 Ollama is required to run local AI models.`));
+     const answer = await promptUser('🌐 Would you like to open the download page in your browser? (y/N): ');
+     if (answer.toLowerCase() === 'y') {
+         openBrowser(OLLAMA_URL);
      }
-     catch (err) {
-         console.error(chalk.red(' Could not open browser. Please visit:'), url);
+     console.log(chalk.yellow('⏳ Waiting for you to install Ollama and press Enter to continue...'));
+     await promptUser('👉 Press Enter once Ollama is installed and ready: ');
+     // Retry once
+     if (await isOllamaRunning()) {
+         console.log(chalk.green('✅ Ollama detected. Continuing...'));
+         ollamaAvailable = true;
+     }
+     else {
+         console.log(chalk.red('❌ Ollama still not detected. Please check your installation.'));
+         process.exit(1);
      }
  }
  // 🧰 List installed models
@@ -106,13 +131,14 @@ async function getInstalledModels() {
              .map((line) => line.split(/\s+/)[0].split(':')[0])
              .filter((model) => REQUIRED_MODELS.includes(model));
      }
-     catch (err) {
-         console.error(chalk.red('❌ Could not fetch installed models:'), err);
+     catch {
          return [];
      }
  }
  // 📥 Download missing models
  async function ensureModelsDownloaded() {
+     if (!ollamaAvailable)
+         return;
      const installed = await getInstalledModels();
      const missing = REQUIRED_MODELS.filter((m) => !installed.includes(m));
      if (!missing.length) {
@@ -131,8 +157,8 @@ async function ensureModelsDownloaded() {
              execSync(`ollama pull ${model}`, { stdio: 'inherit' });
              console.log(chalk.green(`✅ Pulled ${model}`));
          }
-         catch (err) {
-             console.error(chalk.red(`❌ Failed to pull ${model}:`), err);
+         catch {
+             console.log(chalk.red(`❌ Failed to pull ${model}.`));
              process.exit(1);
          }
      }
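
Taken together, the modelSetup.js changes replace "spawn and assume success" with a probe → start → re-probe flow, memoized via ollamaChecked and surfaced to later steps through ollamaAvailable (ensureModelsDownloaded now returns early when Ollama never came up). Below is a minimal, self-contained sketch of that pattern, not the package's exact code: it assumes Node 18+ for the global fetch, and startAndVerify is an illustrative name that does not appear in the package.

    import { spawn } from 'child_process';

    const MODEL_PORT = 11434; // Ollama's default port, as in the diff

    // Probe the local Ollama port; any network error counts as "not running".
    async function isOllamaRunning() {
        try {
            const res = await fetch(`http://localhost:${MODEL_PORT}`);
            return res.ok;
        }
        catch {
            return false;
        }
    }

    // Hypothetical wrapper: start the server in the background, then verify
    // by re-probing instead of assuming the spawn succeeded.
    async function startAndVerify() {
        if (await isOllamaRunning())
            return true;
        const child = spawn('ollama', ['serve'], { detached: true, stdio: 'ignore' });
        child.on('error', () => { /* e.g. ENOENT when ollama is not on PATH */ });
        child.unref();
        await new Promise((res) => setTimeout(res, 10000)); // same 10 s grace period as the diff
        return isOllamaRunning();
    }
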
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "scai",
-   "version": "0.1.70",
+   "version": "0.1.72",
    "type": "module",
    "bin": {
      "scai": "./dist/index.js"