scai 0.1.61 → 0.1.62

@@ -17,7 +17,7 @@ export async function runAskCommand(query) {
         console.error('āŒ No question provided.\nšŸ‘‰ Usage: scai ask "your question"');
         return;
     }
-    console.log(`šŸ“ Using index root: ${getIndexDir}`);
+    console.log(`šŸ“ Using index root: ${getIndexDir()}`);
     console.log(`šŸ” Searching for: "${query}"\n`);
     // 🟩 STEP 1: Semantic Search
    const start = Date.now();
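
Note on the hunk above: interpolating the bare function reference getIndexDir in a template literal prints the function's source text, not the index path; the fix calls getIndexDir(). A minimal illustration of the difference (the stand-in path below is hypothetical, not scai's actual default):

// Illustration only: why the one-character fix matters.
const getIndexDir = () => '/home/user/.scai/index';    // hypothetical stand-in for the real helper
console.log(`šŸ“ Using index root: ${getIndexDir}`);     // prints the arrow function's source text
console.log(`šŸ“ Using index root: ${getIndexDir()}`);   // prints: /home/user/.scai/index
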
package/dist/config.js CHANGED
@@ -6,6 +6,7 @@ import { getRepoKeyForPath, normalizePath } from './utils/normalizePath.js';
 import chalk from 'chalk';
 const defaultConfig = {
     model: 'llama3',
+    contextLength: 8192,
     language: 'ts',
     indexDir: '',
     githubToken: '',
@@ -17,7 +18,7 @@ function ensureConfigDir() {
         fs.mkdirSync(SCAI_HOME, { recursive: true });
     }
 }
-function readConfig() {
+export function readConfig() {
     try {
         const content = fs.readFileSync(CONFIG_PATH, 'utf-8');
        return { ...defaultConfig, ...JSON.parse(content) };
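
Note on the hunks above: readConfig() is now exported and the defaults gain contextLength: 8192. A minimal consumer sketch, assuming only the merged-defaults behaviour shown in readConfig above (the snippet is illustrative and not part of the package):

// Illustrative consumer: merged defaults mean contextLength is always present.
import { readConfig } from './config.js';

const cfg = readConfig();                             // defaults merged with the on-disk config
console.log(`Model: ${cfg.model}`);                   // 'llama3' unless overridden
console.log(`Context length: ${cfg.contextLength}`);  // 8192 by default
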
package/dist/constants.js CHANGED
@@ -52,7 +52,7 @@ export function getIndexDir() {
 /**
  * Limit for number of related files included in model prompt.
  */
-export const RELATED_FILES_LIMIT = 3;
+export const RELATED_FILES_LIMIT = 5;
 /**
  * Limit for number of candidate files to score.
 */
package/dist/index.js CHANGED
@@ -1,10 +1,9 @@
-//!/usr/bin/env node
-import { Command } from "commander";
-import { Config } from './config.js';
+#!/usr/bin/env node
 import { createRequire } from 'module';
 const require = createRequire(import.meta.url);
 const { version } = require('../package.json');
-// 🧠 Commands
+import { Command } from "commander";
+import { Config } from './config.js';
 import { suggestCommitMessage } from "./commands/CommitSuggesterCmd.js";
 import { handleRefactor } from "./commands/RefactorCmd.js";
 import { generateTests } from "./commands/TestGenCmd.js";
@@ -27,8 +26,8 @@ import { validateGitHubTokenAgainstRepo } from "./github/githubAuthCheck.js";
 import { checkGit } from "./commands/GitCmd.js";
 import { runSwitchCommand, runInteractiveSwitch } from "./commands/SwitchCmd.js";
 import { execSync } from "child_process";
-import { dirname, resolve } from "path";
 import { fileURLToPath } from "url";
+import { dirname, resolve } from "path";
 // šŸŽ›ļø CLI Setup
 const cmd = new Command('scai')
    .version(version)
@@ -1,96 +1,112 @@
 import { spawn, execSync } from 'child_process';
 import * as readline from 'readline';
-// Port and models
+import * as fs from 'fs';
+import * as path from 'path';
+import chalk from 'chalk';
+import { getDbForRepo } from './db/client.js'; // Ensure this function works correctly
+import { readConfig, writeConfig } from './config.js';
+import { CONFIG_PATH } from './constants.js';
+// Constants
 const MODEL_PORT = 11434;
-const REQUIRED_MODELS = ['llama3', 'mistral']; // Add more if needed
-// Ensure Ollama is running
+const REQUIRED_MODELS = ['llama3', 'mistral']; // Expand as needed
+const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';
+// 🧠 Auto init config/db if missing
+export async function autoInitIfNeeded() {
+    const cfg = readConfig();
+    if (!fs.existsSync(CONFIG_PATH)) {
+        console.log(chalk.green('šŸ› ļø Config not found. Initializing...'));
+        writeConfig({}); // This will create config.json with defaults
+    }
+    const activeRepo = cfg.activeRepo && cfg.repos[cfg.activeRepo];
+    if (activeRepo) {
+        const dbPath = path.join(activeRepo.indexDir, 'scai.db');
+        if (!fs.existsSync(dbPath)) {
+            console.log(chalk.green('šŸ“¦ DB not found. Initializing...'));
+            getDbForRepo(); // This creates the DB
+        }
+    }
+}
+// šŸš€ Ensure Ollama server is running
 async function ensureOllamaRunning() {
     try {
         const res = await fetch(`http://localhost:${MODEL_PORT}`);
         if (res.ok) {
             console.log('āœ… Ollama is already running.');
+            return;
         }
     }
-    catch (error) {
-        console.error('🟔 Ollama is not running. Starting it in the background...');
-        if (error instanceof Error) {
-            console.error('āŒ Error during Ollama health check:', error.message);
-        }
-        else {
-            console.error('āŒ Unexpected error during Ollama health check:', error);
-        }
+    catch {
+        // Continue to spawn below
+    }
+    console.log(chalk.yellow('āš™ļø Ollama is not running. Starting it in the background...'));
+    try {
         const child = spawn('ollama', ['serve'], {
             detached: true,
             stdio: 'ignore',
             windowsHide: true,
         });
         child.unref();
-        await new Promise((res) => setTimeout(res, 3000)); // Wait a bit for server to be ready
+        await new Promise((res) => setTimeout(res, 3000));
+        console.log('āœ… Ollama started.');
+    }
+    catch (err) {
+        console.error('āŒ Failed to start Ollama:', err);
+        process.exit(1);
     }
 }
-// Get installed models via ollama list
+// 🧰 List installed models
 async function getInstalledModels() {
     try {
         const result = execSync('ollama list', { encoding: 'utf-8' });
-        const installedModels = result
+        return result
             .split('\n')
-            .map((line) => line.split(/\s+/)[0].split(':')[0]) // Get model name, ignore version (e.g., 'llama3:latest' becomes 'llama3')
-            .filter((model) => REQUIRED_MODELS.includes(model)); // Filter based on required models
-        return installedModels;
+            .map((line) => line.split(/\s+/)[0].split(':')[0])
+            .filter((model) => REQUIRED_MODELS.includes(model));
     }
-    catch (error) {
-        console.error('āŒ Failed to fetch installed models:', error instanceof Error ? error.message : error);
+    catch (err) {
+        console.error('āŒ Could not fetch installed models:', err);
         return [];
     }
 }
-// Prompt user for input
+// šŸ’¬ Prompt user
 function promptUser(question) {
-    const rl = readline.createInterface({
-        input: process.stdin,
-        output: process.stdout,
-    });
-    return new Promise((resolve) => rl.question(question, (answer) => {
+    if (isYesMode)
+        return Promise.resolve('y');
+    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+    return new Promise((resolve) => rl.question(question, (a) => {
         rl.close();
-        resolve(answer.trim());
+        resolve(a.trim());
     }));
 }
-// Ensure all required models are downloaded
+// šŸ“„ Download missing models
 async function ensureModelsDownloaded() {
-    const installedModels = await getInstalledModels();
-    const missingModels = REQUIRED_MODELS.filter((model) => !installedModels.includes(model));
-    if (missingModels.length === 0) {
-        console.log('āœ… All required models are already installed.');
+    const installed = await getInstalledModels();
+    const missing = REQUIRED_MODELS.filter((m) => !installed.includes(m));
+    if (!missing.length) {
+        console.log('āœ… All required models are installed.');
         return;
     }
-    console.log(`🟔 Missing models: ${missingModels.join(', ')}`);
-    const answer = await promptUser('Do you want to download the missing models now? (y/N): ');
+    console.log(chalk.yellow(`šŸ“¦ Missing models: ${missing.join(', ')}`));
+    const answer = await promptUser('ā¬‡ļø Do you want to download them now? (y/N): ');
     if (answer.toLowerCase() !== 'y') {
-        console.log('🚫 Missing models not downloaded. Exiting.');
+        console.log('🚫 Aborting due to missing models.');
         process.exit(1);
     }
-    for (const model of missingModels) {
+    for (const model of missing) {
         try {
-            console.log(`ā¬‡ļø Pulling model: ${model} ...`);
+            console.log(`ā¬‡ļø Pulling ${model}...`);
             execSync(`ollama pull ${model}`, { stdio: 'inherit' });
-            console.log(`āœ… Successfully pulled ${model}.`);
+            console.log(chalk.green(`āœ… Pulled ${model}`));
         }
         catch (err) {
-            console.error(`āŒ Failed to pull ${model}:`, err instanceof Error ? err.message : err);
+            console.error(`āŒ Failed to pull ${model}:`, err);
             process.exit(1);
         }
     }
 }
-// Initialize the application
+// šŸ Main bootstrap logic
 export async function bootstrap() {
-    try {
-        // Ensure Ollama is running only once at the start
-        await ensureOllamaRunning();
-        // Ensure models are downloaded once
-        await ensureModelsDownloaded();
-        // Now your CLI logic can proceed here...
-    }
-    catch (error) {
-        console.error('āŒ Error during initialization:', error instanceof Error ? error.message : error);
-        process.exit(1);
-    }
+    await autoInitIfNeeded();
+    await ensureOllamaRunning();
+    await ensureModelsDownloaded();
 }
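
Note on the hunk above (its file header is not shown in the diff): bootstrap() now runs autoInitIfNeeded() before the Ollama checks, and promptUser() short-circuits to 'y' when --yes is passed or SCAI_YES=1 is set, making the model-download prompt non-interactive. A rough sketch of driving it programmatically; the import path is a guess, since the diff omits the filename:

// Illustration only: non-interactive startup. './bootstrap.js' is a hypothetical path.
import { bootstrap } from './bootstrap.js';

process.env.SCAI_YES = '1';  // same effect as the --yes flag: promptUser resolves 'y'
await bootstrap();           // autoInitIfNeeded, then ensureOllamaRunning, then ensureModelsDownloaded
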
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "scai",
-  "version": "0.1.61",
+  "version": "0.1.62",
   "type": "module",
   "bin": {
    "scai": "./dist/index.js"