scai 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +4 -0
- package/README.md +29 -13
- package/dist/commands/CommitSuggesterCmd.js +3 -1
- package/dist/index.js +13 -1
- package/dist/modelSetup.js +96 -0
- package/package.json +2 -2
package/LICENSE
ADDED
package/README.md
CHANGED
|
@@ -1,13 +1,29 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
1
|
+
# scai — Smart Commit AI ✨
|
|
2
|
+
|
|
3
|
+
> AI-powered commit message suggestions from staged changes — directly in your terminal.
|
|
4
|
+
|
|
5
|
+
**scai** (Smart Commit AI) is a lightweight, privacy-focused CLI tool that uses a local AI model to generate clear, meaningful commit messages based on your staged Git changes. No internet required. No telemetry. Just smart commits.
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## 🚀 Features
|
|
10
|
+
|
|
11
|
+
- 🔍 Automatically suggests commit messages based on `git diff`
|
|
12
|
+
- 🤖 Uses a local language model via [Ollama](https://ollama.com/) — no external API keys
|
|
13
|
+
- 🧱 Designed for local developer workflows and automation
|
|
14
|
+
- 📦 Works with any Git repository
|
|
15
|
+
|
|
16
|
+
---
|
|
17
|
+
|
|
18
|
+
## 📦 Installation
|
|
19
|
+
|
|
20
|
+
You'll need [Ollama](https://ollama.com/) installed and running locally, with a supported model like `mistral` pulled:
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
ollama run mistral
```
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
## License
|
|
27
|
+
|
|
28
|
+
This software is licensed for non-commercial use only.
|
|
29
|
+
See the [LICENSE](./LICENSE) file for details.
|
|
@@ -23,8 +23,10 @@ export async function suggestCommitMessage(options) {
|
|
|
23
23
|
if (response.error) {
|
|
24
24
|
throw new Error(`LLM error: ${response.error}`);
|
|
25
25
|
}
|
|
26
|
-
|
|
26
|
+
let message = response?.trim();
|
|
27
27
|
console.log(`${message}`);
|
|
28
|
+
// Remove double quotes from the message
|
|
29
|
+
message = message.replace(/^"(.*)"$/, '$1');
|
|
28
30
|
// 3) Optionally commit
|
|
29
31
|
if (options.commit) {
|
|
30
32
|
// If code not already staged
|
package/dist/index.js
CHANGED
|
@@ -4,8 +4,19 @@ import { checkEnv } from "./commands/EnvCmd.js";
|
|
|
4
4
|
import { checkGit } from "./commands/GitCmd.js";
|
|
5
5
|
import { suggestCommitMessage } from "./commands/CommitSuggesterCmd.js";
|
|
6
6
|
import { handleRefactor } from "./commands/RefactorCmd.js";
|
|
7
|
-
|
|
7
|
+
// Import the model check and initialization logic
|
|
8
|
+
import { bootstrap } from './modelSetup.js';
|
|
9
|
+
// Create the CLI instance
|
|
10
|
+
const cmd = new Command('scai')
|
|
8
11
|
.version('0.1.0');
|
|
12
|
+
// Define CLI commands
|
|
13
|
+
cmd
|
|
14
|
+
.command('init')
|
|
15
|
+
.description('Initialize the model and download required models')
|
|
16
|
+
.action(async () => {
|
|
17
|
+
await bootstrap();
|
|
18
|
+
console.log('✅ Model initialization completed!');
|
|
19
|
+
});
|
|
9
20
|
cmd
|
|
10
21
|
.command('env')
|
|
11
22
|
.description('Check environment variables')
|
|
@@ -23,4 +34,5 @@ cmd
|
|
|
23
34
|
.command('refactor <file>')
|
|
24
35
|
.description('Suggest a refactor for the given JS file')
|
|
25
36
|
.action((file) => handleRefactor(file));
|
|
37
|
+
// Parse CLI arguments
|
|
26
38
|
cmd.parse(process.argv);
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { spawn, execSync } from 'child_process';
|
|
2
|
+
import * as readline from 'readline';
|
|
3
|
+
// Local Ollama server port and the models this CLI depends on.
const MODEL_PORT = 11434;
// Frozen so no caller can mutate the shared module-level constant.
// Add more models here if needed.
const REQUIRED_MODELS = Object.freeze(['llama3', 'mistral']);
|
|
6
|
+
// Ensure the local Ollama server is reachable, starting it in the
// background when the health check fails.
//
// A fetch that resolves (any HTTP status) proves the server is
// listening; only a rejected fetch (e.g. ECONNREFUSED) means it is
// not running. The previous version logged "❌ Error during Ollama
// health check" on the expected cold-start path, and silently did
// nothing when the server answered with a non-ok status.
async function ensureOllamaRunning() {
  try {
    // Any response at all means the server is up.
    await fetch(`http://localhost:${MODEL_PORT}`);
    console.log('✅ Ollama is already running.');
    return;
  }
  catch {
    // Expected on first run — not an error condition.
    console.log('🟡 Ollama is not running. Starting it in the background...');
  }
  // Launch `ollama serve` detached so it outlives this CLI process.
  const child = spawn('ollama', ['serve'], {
    detached: true,
    stdio: 'ignore',
    windowsHide: true,
  });
  child.unref();
  // Give the server a moment to come up before callers talk to it.
  await new Promise((res) => setTimeout(res, 3000));
}
|
|
31
|
+
// Get installed models via `ollama list`.
//
// Returns the subset of REQUIRED_MODELS that is already installed,
// de-duplicated: a model installed under several tags (e.g.
// 'llama3:8b' and 'llama3:latest') previously appeared twice in the
// result. Returns [] when the command cannot be executed.
async function getInstalledModels() {
  try {
    const result = execSync('ollama list', { encoding: 'utf-8' });
    const names = new Set();
    for (const line of result.split('\n')) {
      const raw = line.trim();
      if (raw === '') continue; // skip blank lines
      // First column is the model name; drop the version tag
      // ('llama3:latest' becomes 'llama3').
      const name = raw.split(/\s+/)[0].split(':')[0];
      if (REQUIRED_MODELS.includes(name)) {
        names.add(name);
      }
    }
    return [...names];
  }
  catch (error) {
    console.error('❌ Failed to fetch installed models:', error instanceof Error ? error.message : error);
    return [];
  }
}
|
|
46
|
+
// Ask the user a question on stdin and resolve with the trimmed answer.
function promptUser(question) {
  return new Promise((resolve) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    rl.question(question, (answer) => {
      // Close the interface so the process can exit cleanly.
      rl.close();
      resolve(answer.trim());
    });
  });
}
|
|
57
|
+
// Ensure all required models are downloaded, prompting before pulling.
//
// Exits the process with code 1 when the user declines or a pull
// fails. The prompt advertises "(y/N)", so both 'y' and 'yes'
// (case-insensitive) are accepted — the previous version treated
// 'yes' as a decline.
async function ensureModelsDownloaded() {
  const installedModels = await getInstalledModels();
  const missingModels = REQUIRED_MODELS.filter((model) => !installedModels.includes(model));
  if (missingModels.length === 0) {
    console.log('✅ All required models are already installed.');
    return;
  }
  console.log(`🟡 Missing models: ${missingModels.join(', ')}`);
  const answer = await promptUser('Do you want to download the missing models now? (y/N): ');
  if (!['y', 'yes'].includes(answer.toLowerCase())) {
    console.log('🚫 Missing models not downloaded. Exiting.');
    process.exit(1);
  }
  for (const model of missingModels) {
    try {
      console.log(`⬇️ Pulling model: ${model} ...`);
      // stdio: 'inherit' streams ollama's progress output to the user.
      execSync(`ollama pull ${model}`, { stdio: 'inherit' });
      console.log(`✅ Successfully pulled ${model}.`);
    }
    catch (err) {
      console.error(`❌ Failed to pull ${model}:`, err instanceof Error ? err.message : err);
      process.exit(1);
    }
  }
}
|
|
83
|
+
// Initialize the application: make sure the Ollama server is up, then
// make sure every required model is present. Exits with code 1 on any
// initialization failure.
export async function bootstrap() {
  try {
    await ensureOllamaRunning();    // health-check / background start, once
    await ensureModelsDownloaded(); // pull any missing models, once
    // CLI logic can proceed from here.
  }
  catch (error) {
    const detail = error instanceof Error ? error.message : error;
    console.error('❌ Error during initialization:', detail);
    process.exit(1);
  }
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "scai",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.3",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"bin": {
|
|
6
6
|
"scai": "./dist/index.js"
|
|
@@ -10,7 +10,7 @@
|
|
|
10
10
|
"url": "git+https://github.com/rzs/scai.git"
|
|
11
11
|
},
|
|
12
12
|
"author": "Rasmus Uhd Norgaard",
|
|
13
|
-
"license": "
|
|
13
|
+
"license": "SEE LICENSE IN LICENSE",
|
|
14
14
|
"keywords": ["cli", "ai", "refactor", "devtools", "local", "typescript"],
|
|
15
15
|
"scripts": {
|
|
16
16
|
"build": "tsc",
|