scai 0.1.68 → 0.1.70

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -127,7 +127,6 @@ export async function runAskCommand(query) {
  }
  // 🟩 STEP 7: Build prompt
  console.log(chalk.blueBright('\n📦 Building contextual prompt...'));
- console.log(chalk.gray(`[runAskCommand] Calling buildContextualPrompt()`));
  const promptContent = buildContextualPrompt({
  baseInstruction: query,
  code,
@@ -3,57 +3,100 @@ import * as readline from 'readline';
  import * as fs from 'fs';
  import * as path from 'path';
  import chalk from 'chalk';
- import { getDbForRepo } from './db/client.js'; // Ensure this function works correctly
+ import { getDbForRepo } from './db/client.js';
  import { readConfig, writeConfig } from './config.js';
  import { CONFIG_PATH } from './constants.js';
+ import { platform } from 'os';
  // Constants
  const MODEL_PORT = 11434;
- const REQUIRED_MODELS = ['llama3', 'mistral']; // Expand as needed
+ const REQUIRED_MODELS = ['llama3', 'mistral'];
+ const OLLAMA_URL = 'https://ollama.com/download';
  const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';
  // 🧠 Auto init config/db if missing
  export async function autoInitIfNeeded() {
  const cfg = readConfig();
  if (!fs.existsSync(CONFIG_PATH)) {
  console.log(chalk.green('🛠️ Config not found. Initializing...'));
- writeConfig({}); // This will create config.json with defaults
+ writeConfig({});
  }
  const activeRepo = cfg.activeRepo && cfg.repos[cfg.activeRepo];
  if (activeRepo) {
  const dbPath = path.join(activeRepo.indexDir, 'scai.db');
  if (!fs.existsSync(dbPath)) {
  console.log(chalk.green('📦 DB not found. Initializing...'));
- getDbForRepo(); // This creates the DB
+ getDbForRepo();
  }
  }
  }
+ // 🗨 Prompt user
+ function promptUser(question) {
+ if (isYesMode)
+ return Promise.resolve('y');
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+ return new Promise((resolve) => rl.question(question, (a) => {
+ rl.close();
+ resolve(a.trim());
+ }));
+ }
  // 🚀 Ensure Ollama server is running
  async function ensureOllamaRunning() {
  try {
  const res = await fetch(`http://localhost:${MODEL_PORT}`);
  if (res.ok) {
- console.log('✅ Ollama is already running.');
+ console.log(chalk.green('✅ Result:') + ` Ollama is already running on port ${MODEL_PORT}.`);
  return;
  }
  }
  catch {
- // Continue to spawn below
+ console.log(chalk.yellow('⚙️ Challenge:') + ' Ollama is not running. Attempting to start it...');
  }
- console.log(chalk.yellow('⚙️ Ollama is not running. Starting it in the background...'));
+ console.log(chalk.yellow('⚙️ Challenge:') + ` Ollama does not appear to be running on port ${MODEL_PORT}.\n` +
+ chalk.yellow('🚀 Action:') + ' Attempting to start Ollama in the background...');
  try {
  const child = spawn('ollama', ['serve'], {
  detached: true,
  stdio: 'ignore',
  windowsHide: true,
  });
+ child.on('error', async (err) => {
+ if (err.code === 'ENOENT') {
+ console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
+ console.log(chalk.yellow(`📦 Ollama is required to run local AI models.\n`));
+ console.log(chalk.yellow(`🔗 Download it here: ${OLLAMA_URL}`));
+ const answer = await promptUser('🌐 Would you like to open the download page in your browser? (y/N): ');
+ if (answer.toLowerCase() === 'y') {
+ openBrowser(OLLAMA_URL);
+ }
+ process.exit(1);
+ }
+ else {
+ console.error(chalk.red('❌ Failed to start Ollama process:'), err);
+ process.exit(1);
+ }
+ });
  child.unref();
  await new Promise((res) => setTimeout(res, 3000));
- console.log('✅ Ollama started.');
+ console.log(chalk.green('✅ Result:') + ' Ollama started successfully.');
  }
  catch (err) {
- console.error('❌ Failed to start Ollama:', err);
+ console.error(chalk.red('❌ Unexpected error while trying to start Ollama:'), err);
  process.exit(1);
  }
  }
+ // 🧭 Cross-platform browser opener
+ function openBrowser(url) {
+ const command = platform() === 'win32'
+ ? `start ${url}`
+ : platform() === 'darwin'
+ ? `open ${url}`
+ : `xdg-open ${url}`;
+ try {
+ execSync(command, { stdio: 'ignore' });
+ }
+ catch (err) {
+ console.error(chalk.red('❌ Could not open browser. Please visit:'), url);
+ }
+ }
  // 🧰 List installed models
  async function getInstalledModels() {
  try {
@@ -64,32 +107,22 @@ async function getInstalledModels() {
  .filter((model) => REQUIRED_MODELS.includes(model));
  }
  catch (err) {
- console.error('❌ Could not fetch installed models:', err);
+ console.error(chalk.red('❌ Could not fetch installed models:'), err);
  return [];
  }
  }
- // 💬 Prompt user
- function promptUser(question) {
- if (isYesMode)
- return Promise.resolve('y');
- const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
- return new Promise((resolve) => rl.question(question, (a) => {
- rl.close();
- resolve(a.trim());
- }));
- }
  // 📄 Download missing models
  async function ensureModelsDownloaded() {
  const installed = await getInstalledModels();
  const missing = REQUIRED_MODELS.filter((m) => !installed.includes(m));
  if (!missing.length) {
- console.log('✅ All required models are installed.');
+ console.log(chalk.green('✅ All required models are installed.'));
  return;
  }
  console.log(chalk.yellow(`📦 Missing models: ${missing.join(', ')}`));
  const answer = await promptUser('⬇️ Do you want to download them now? (y/N): ');
  if (answer.toLowerCase() !== 'y') {
- console.log('🚫 Aborting due to missing models.');
+ console.log(chalk.red('🚫 Aborting due to missing models.'));
  process.exit(1);
  }
  for (const model of missing) {
@@ -99,7 +132,7 @@ async function ensureModelsDownloaded() {
  console.log(chalk.green(`✅ Pulled ${model}`));
  }
  catch (err) {
- console.error(`❌ Failed to pull ${model}:`, err);
+ console.error(chalk.red(`❌ Failed to pull ${model}:`), err);
  process.exit(1);
  }
  }
@@ -16,8 +16,7 @@ export function buildContextualPrompt({ baseInstruction, code, summary, function
  parts.push(`🔧 Functions:\n${formattedFunctions}`);
  }
  else {
- console.log(chalk.gray(`[buildContextualPrompt]`) +
- chalk.yellow(` ⚠️ No functions found in top file.`));
+ console.log(chalk.yellow(` ⚠️ No functions found in top rated file.`));
  }
  if (relatedFiles?.length) {
  const formattedRelatedFiles = relatedFiles
@@ -1,10 +1,16 @@
- import { wrapText } from "./textWrapper.js";
- // src/utils/summarizer.ts
+ import columnify from 'columnify';
  export function summarizeCode(summaryText) {
- // Get the terminal width (default to 80 if not available)
  const terminalWidth = process.stdout.columns || 80;
- // Use two-thirds of the terminal width for the wrapping
- const twoThirdsTerminalWidth = Math.floor((terminalWidth * 2) / 3);
- // Wrap the summary output to fit within two-thirds of the terminal width
- return wrapText(summaryText, twoThirdsTerminalWidth);
+ // You can control wrapping here
+ const formatted = columnify([{ Summary: summaryText }], {
+ columnSplitter: ' ',
+ maxLineWidth: terminalWidth,
+ config: {
+ Summary: {
+ maxWidth: Math.floor((terminalWidth * 2) / 3), // Use 2/3 width like before
+ align: "left",
+ },
+ },
+ });
+ return formatted;
  }
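
For orientation, a minimal usage sketch of the new columnify-based summarizeCode (the import path and the sample summary text are assumptions for illustration, not part of the published package):

// Hypothetical caller — assumes summarizeCode is exported from the summarizer module changed above.
import { summarizeCode } from './summarizer.js';

const summary = 'Indexes the active repo, extracts functions, and stores them in a local SQLite database for retrieval.';
// columnify wraps the single "Summary" column at two-thirds of the terminal width, as the removed wrapText helper did.
console.log(summarizeCode(summary));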
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "scai",
- "version": "0.1.68",
+ "version": "0.1.70",
  "type": "module",
  "bin": {
  "scai": "./dist/index.js"
@@ -41,6 +41,7 @@
  "acorn-walk": "^8.3.2",
  "better-sqlite3": "^12.1.1",
  "chalk": "^5.4.1",
+ "columnify": "^1.6.0",
  "commander": "^11.0.0",
  "fast-glob": "^3.3.3",
  "proper-lockfile": "^4.1.2",
@@ -49,6 +50,7 @@
  },
  "devDependencies": {
  "@types/better-sqlite3": "^7.6.13",
+ "@types/columnify": "^1.5.4",
  "@types/jest": "^30.0.0",
  "@types/node": "^24.1.0",
  "@types/proper-lockfile": "^4.1.4",
@@ -1,26 +0,0 @@
- export function wrapText(text, maxWidth) {
- const words = text.split(' ');
- let wrappedText = '';
- let currentLine = '';
- words.forEach(word => {
- // If the word is longer than the maxWidth, break it up into multiple lines
- if (word.length > maxWidth) {
- // Break the word into smaller chunks
- while (word.length > maxWidth) {
- wrappedText += word.slice(0, maxWidth) + '\n';
- word = word.slice(maxWidth);
- }
- }
- // Check if adding the word would exceed the max width
- if ((currentLine + word).length > maxWidth) {
- wrappedText += currentLine + '\n'; // Add the current line and start a new one
- currentLine = word + ' '; // Start the new line with the current word
- }
- else {
- currentLine += word + ' '; // Add the word to the current line
- }
- });
- // Append the last line if any
- wrappedText += currentLine.trim(); // trim() to remove the extra space at the end
- return wrappedText;
- }