commitmind 1.0.3 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -96,6 +96,23 @@ This will:
  
  ---
  
+ 
+ ## Model Configuration
+ 
+ CommitMind allows you to choose and persist your preferred local AI model.
+ 
+ ### First-time setup
+ 
+ On the first run, CommitMind will automatically prompt you to select an installed Ollama model.
+ Your choice will be saved and used for future commits.
+ 
+ ---
+ 
+ ### Change model anytime
+ 
+ ```bash
+ aic model set
+ ```
  ## Example Workflow
  
  ```bash
@@ -106,6 +123,18 @@ aic push main
  ```
  
  ---
+ ### View current model
+ ```bash
+ aic model get
+ ```
+ 
+ #### Override model (optional)
+ 
+ You can also specify a model directly:
+ 
+ ```bash
+ aic auto --model mistral
+ ```
  
  ## Why CommitMind?
  
package/dist/config.js ADDED
@@ -0,0 +1,21 @@
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
+ import { homedir } from "node:os";
+ import path from "node:path";
+ const CONFIG_DIR = path.join(homedir(), '.commitmind');
+ const CONFIG_FILE = path.join(CONFIG_DIR, 'config.json');
+ export function getConfig() {
+     try {
+         if (!existsSync(CONFIG_FILE))
+             return null;
+         return JSON.parse(readFileSync(CONFIG_FILE, 'utf-8'));
+     }
+     catch (err) {
+         return null;
+     }
+ }
+ export function saveConfig(config) {
+     if (!existsSync(CONFIG_DIR)) {
+         mkdirSync(CONFIG_DIR);
+     }
+     writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2));
+ }
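
For reference, a minimal sketch of how the two helpers above behave together (the `mistral` model name is illustrative, and the snippet assumes it runs next to the compiled `dist/config.js` on Node 18+):

```js
// Sketch only: saveConfig persists the chosen model to ~/.commitmind/config.json
// as pretty-printed JSON; getConfig reads it back and returns null if the file
// is missing or unparsable.
import { saveConfig, getConfig } from "./config.js";

saveConfig({ model: "mistral" });  // writes { "model": "mistral" } to the config file
console.log(getConfig());          // -> { model: "mistral" } on any later run
```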
package/dist/index.js CHANGED
@@ -1,30 +1,71 @@
  #!/usr/bin/env node
  import { stageAll, getDiff, commit, push } from "./git.js";
  import { generateCommit } from "./ollama.js";
+ import { getConfig, saveConfig } from "./config.js";
+ import { selectModel } from "./selectModel.js";
+ async function resolveModel(cliModel) {
+     // 1. CLI flag wins (old users safe)
+     if (cliModel) {
+         saveConfig({ model: cliModel });
+         return cliModel;
+     }
+     const config = getConfig();
+     if (config?.model)
+         return config.model;
+     if (!process.stdout.isTTY) {
+         return "llama3.1:latest";
+     }
+     console.log("No model configured.");
+     return await selectModel();
+ }
  async function main() {
-     const command = process.argv[2];
+     const args = process.argv.slice(2);
+     const command = args[0];
      if (!command) {
          console.log(`
  Usage:
  aic auto
  aic push <branch>
  
+ Model:
+ aic model set
+ aic model get
+ 
  Examples:
  aic auto
  aic push main
  `);
          return;
      }
+     if (command === "model") {
+         const sub = args[1];
+         if (sub === "set") {
+             await selectModel();
+             return;
+         }
+         if (sub === "get") {
+             const config = getConfig();
+             console.log("Current model:", config?.model || "Not set");
+             return;
+         }
+     }
+     // -------- model flag --------
+     let cliModel;
+     const modelIndex = args.indexOf("--model");
+     if (modelIndex !== -1 && args[modelIndex + 1]) {
+         cliModel = args[modelIndex + 1];
+     }
+     const model = await resolveModel(cliModel);
      console.log("📦 Staging changes...");
      await stageAll();
      const diff = await getDiff();
-     console.log(" Generating commit...");
-     const message = await generateCommit(diff);
-     console.log("Commit:", message);
+     console.log(`Generating commit using ${model}...`);
+     const message = await generateCommit(diff, model);
+     console.log("Commit:", message);
      await commit(message);
      if (command === "push") {
-         const branch = process.argv[3] || "main";
-         console.log(` Pushing to ${branch}...`);
+         const branch = args[1] || "main";
+         console.log(`Pushing to ${branch}...`);
          await push(branch);
      }
  }
package/dist/ollama.js CHANGED
@@ -1,4 +1,4 @@
- export async function generateCommit(diff) {
+ export async function generateCommit(diff, model = 'llama3.1:latest') {
      if (!diff) {
          console.log(`No changes done`);
          process.exit(0);
@@ -48,14 +48,20 @@ Now generate the commit message.
  Diff:
  ${diff}
  `;
-     const res = await fetch('http://localhost:11434/api/generate', {
-         method: 'POST',
-         body: JSON.stringify({
-             model: 'llama3.1:latest',
-             prompt,
-             stream: false,
-         })
-     });
-     const data = await res.json();
-     return data.response.trim();
+     try {
+         const res = await fetch('http://localhost:11434/api/generate', {
+             method: 'POST',
+             body: JSON.stringify({
+                 model,
+                 prompt,
+                 stream: false,
+             })
+         });
+         const data = (await res.json());
+         return data.response.trim();
+     }
+     catch (err) {
+         console.log(`Failed to connect to Ollama`);
+         process.exit(1);
+     }
  }
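
For context, `generateCommit` above talks to Ollama's local `/api/generate` endpoint; with `stream: false` the endpoint replies with a single JSON body whose `response` field carries the generated text. A standalone sketch of the same call (the model name and prompt are illustrative, and Ollama is assumed to be running on its default port 11434):

```js
// Sketch only: the request shape generateCommit sends and the field it reads back.
const res = await fetch("http://localhost:11434/api/generate", {
  method: "POST",
  body: JSON.stringify({
    model: "mistral",   // illustrative; any locally installed Ollama model works
    prompt: "Write a one-line commit message for: fix typo in README",
    stream: false,      // request one complete JSON reply instead of a token stream
  }),
});
const data = await res.json();
console.log(data.response.trim());  // the generated text, as used by generateCommit
```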
package/dist/selectModel.js ADDED
@@ -0,0 +1,40 @@
+ import { saveConfig } from "./config.js";
+ import readline from "readline";
+ async function getOllamaModels() {
+     try {
+         const res = await fetch('http://localhost:11434/api/tags');
+         const data = await res.json();
+         return data.models.map((m) => m.name);
+     }
+     catch (err) {
+         console.log(`Ollama is not running ${err}`);
+         process.exit(1);
+     }
+ }
+ export async function selectModel() {
+     const models = await getOllamaModels();
+     if (!models.length) {
+         console.log(`No Ollama models found ! `);
+         process.exit(1);
+     }
+     console.log('Choose an Ollama model : ');
+     models.forEach((m, i) => console.log(`${i + 1}.${m}`));
+     const rl = readline.createInterface({
+         input: process.stdin,
+         output: process.stdout,
+     });
+     const choice = await new Promise((res) => {
+         rl.question('Enter number : ', (ans) => {
+             rl.close();
+             res(Number(ans));
+         });
+     });
+     const selected = models[choice - 1];
+     if (!selected) {
+         console.log(`Invalid Choice`);
+         process.exit(1);
+     }
+     saveConfig({ model: selected });
+     console.log(`Model saved : ${selected}`);
+     return selected;
+ }
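
`getOllamaModels` above assumes Ollama's `/api/tags` endpoint returns the locally installed models as a `models` array whose entries carry a `name` field. A small sketch of that lookup on its own (again assuming Ollama is running locally on port 11434):

```js
// Sketch only: the listing selectModel builds its numbered menu from.
const res = await fetch("http://localhost:11434/api/tags");
const data = await res.json();
// data.models is roughly [{ name: "llama3.1:latest", ... }, { name: "mistral:latest", ... }]
console.log(data.models.map((m) => m.name));
```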
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "commitmind",
-   "version": "1.0.3",
+   "version": "1.1.0",
    "author": "Nabin Sharma",
    "bin": {
      "aic": "./dist/index.js",
@@ -24,7 +24,6 @@
      "typescript": "^5.9.3"
    },
    "dependencies": {
-     "commitmind": "^1.0.0",
      "readline": "^1.3.0"
    }
  }
@@ -1,5 +1,5 @@
  
- import { saveConfig } from "./config";
+ import { saveConfig } from "./config.js";
  import readline from "readline";
  
  async function getOllamaModels():Promise<string[]> {