@vavasilva/git-commit-ai 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +359 -7
- package/dist/chunk-5MPJCPJ4.js +225 -0
- package/dist/chunk-5MPJCPJ4.js.map +1 -0
- package/dist/git-F4ZHBA3B.js +36 -0
- package/dist/git-F4ZHBA3B.js.map +1 -0
- package/dist/index.js +757 -148
- package/dist/index.js.map +1 -1
- package/package.json +11 -3
package/dist/index.js
CHANGED
@@ -1,4 +1,16 @@
 #!/usr/bin/env node
+import {
+  addFiles,
+  commit,
+  commitAmend,
+  filterDiffByPatterns,
+  getFileDiff,
+  getLastCommitDiff,
+  getStagedDiff,
+  getStagedFiles,
+  push,
+  resetStaged
+} from "./chunk-5MPJCPJ4.js";
 
 // src/cli.ts
 import { Command } from "commander";
@@ -12,31 +24,83 @@ import { homedir } from "os";
 import { join, dirname } from "path";
 import { parse as parseToml } from "smol-toml";
 var DEFAULT_CONFIG = {
+  backend: "ollama",
   model: "llama3.1:8b",
   ollama_url: "http://localhost:11434",
+  openai_base_url: "https://api.openai.com/v1",
   temperature: 0.7,
-  retry_temperatures: [0.5, 0.3, 0.2]
+  retry_temperatures: [0.5, 0.3, 0.2],
+  ignore_patterns: []
+};
+var VALID_BACKENDS = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+var LOCAL_CONFIG_NAMES = [".gitcommitai", ".gitcommitai.toml"];
+var VALID_CONFIG_KEYS = [
+  "backend",
+  "model",
+  "ollama_url",
+  "openai_base_url",
+  "temperature",
+  "default_scope",
+  "default_type",
+  "default_language"
+];
+var CONFIG_ALIASES = {
+  lang: "default_language",
+  scope: "default_scope",
+  type: "default_type",
+  url: "ollama_url",
+  temp: "temperature"
 };
 function getConfigPath() {
   return join(homedir(), ".config", "git-commit-ai", "config.toml");
 }
-function
-const
-
-
+function getLocalConfigPath() {
+  for (const name of LOCAL_CONFIG_NAMES) {
+    if (existsSync(name)) {
+      return name;
+    }
   }
+  return null;
+}
+function parseConfigFile(path) {
   try {
-    const content = readFileSync(
-
-    return {
-      model: data.model ?? DEFAULT_CONFIG.model,
-      ollama_url: data.ollama_url ?? DEFAULT_CONFIG.ollama_url,
-      temperature: data.temperature ?? DEFAULT_CONFIG.temperature,
-      retry_temperatures: data.retry_temperatures ?? DEFAULT_CONFIG.retry_temperatures
-    };
+    const content = readFileSync(path, "utf-8");
+    return parseToml(content);
   } catch {
-    return
+    return null;
+  }
+}
+function mergeConfigs(base, override) {
+  return {
+    backend: VALID_BACKENDS.includes(override.backend) ? override.backend : base.backend,
+    model: override.model ?? base.model,
+    ollama_url: override.ollama_url ?? base.ollama_url,
+    openai_base_url: override.openai_base_url ?? base.openai_base_url,
+    temperature: override.temperature ?? base.temperature,
+    retry_temperatures: override.retry_temperatures ?? base.retry_temperatures,
+    ignore_patterns: override.ignore_patterns ?? base.ignore_patterns,
+    default_scope: override.default_scope ?? base.default_scope,
+    default_type: override.default_type ?? base.default_type,
+    default_language: override.default_language ?? base.default_language
+  };
+}
+function loadConfig() {
+  let config = { ...DEFAULT_CONFIG };
+  const globalPath = getConfigPath();
+  if (existsSync(globalPath)) {
+    const globalData = parseConfigFile(globalPath);
+    if (globalData) {
+      config = mergeConfigs(config, globalData);
+    }
+  }
+  const localPath = getLocalConfigPath();
+  if (localPath) {
+    const localData = parseConfigFile(localPath);
+    if (localData) {
+      config = mergeConfigs(config, localData);
+    }
   }
+  return config;
 }
 function saveConfig(config) {
   const configPath = getConfigPath();
@@ -44,21 +108,126 @@ function saveConfig(config) {
   if (!existsSync(dir)) {
     mkdirSync(dir, { recursive: true });
   }
-
+  let content = `# git-commit-ai configuration
+# Backend: ollama, llamacpp, openai, anthropic, groq
+backend = "${config.backend}"
 model = "${config.model}"
 ollama_url = "${config.ollama_url}"
+# OpenAI Base URL - change this to use OpenAI-compatible APIs like llama.cpp
+# Example: http://localhost:8080/v1 for llama-server
+openai_base_url = "${config.openai_base_url}"
 temperature = ${config.temperature}
 retry_temperatures = [${config.retry_temperatures.join(", ")}]
 `;
+  if (config.default_language) {
+    content += `default_language = "${config.default_language}"
+`;
+  }
+  if (config.default_scope) {
+    content += `default_scope = "${config.default_scope}"
+`;
+  }
+  if (config.default_type) {
+    content += `default_type = "${config.default_type}"
+`;
+  }
+  if (config.ignore_patterns && config.ignore_patterns.length > 0) {
+    content += `ignore_patterns = [${config.ignore_patterns.map((p) => `"${p}"`).join(", ")}]
+`;
+  }
   writeFileSync(configPath, content, "utf-8");
 }
 function showConfig(config) {
-
+  const localPath = getLocalConfigPath();
+  let output = `Configuration:
+  Backend: ${config.backend}
   Model: ${config.model}
   Ollama URL: ${config.ollama_url}
+  OpenAI Base URL: ${config.openai_base_url}
   Temperature: ${config.temperature}
-  Retry temperatures: [${config.retry_temperatures.join(", ")}]
-
+  Retry temperatures: [${config.retry_temperatures.join(", ")}]`;
+  if (config.ignore_patterns && config.ignore_patterns.length > 0) {
+    output += `
+  Ignore patterns: [${config.ignore_patterns.join(", ")}]`;
+  }
+  if (config.default_scope) {
+    output += `
+  Default scope: ${config.default_scope}`;
+  }
+  if (config.default_type) {
+    output += `
+  Default type: ${config.default_type}`;
+  }
+  if (config.default_language) {
+    output += `
+  Default language: ${config.default_language}`;
+  }
+  output += `
+  Global config: ${getConfigPath()}`;
+  if (localPath) {
+    output += `
+  Local config: ${localPath}`;
+  }
+  return output;
+}
+function updateConfig(key, value) {
+  const resolvedKey = CONFIG_ALIASES[key] || key;
+  if (!VALID_CONFIG_KEYS.includes(resolvedKey)) {
+    const aliasHelp = Object.entries(CONFIG_ALIASES).map(([alias, full]) => `${alias} \u2192 ${full}`).join(", ");
+    return {
+      success: false,
+      message: `Invalid config key: "${key}". Valid keys: ${VALID_CONFIG_KEYS.join(", ")}. Aliases: ${aliasHelp}`
+    };
+  }
+  const configKey = resolvedKey;
+  if (configKey === "backend" && !VALID_BACKENDS.includes(value)) {
+    return {
+      success: false,
+      message: `Invalid backend: "${value}". Valid backends: ${VALID_BACKENDS.join(", ")}`
+    };
+  }
+  if (configKey === "temperature") {
+    const temp = parseFloat(value);
+    if (isNaN(temp) || temp < 0 || temp > 1) {
+      return {
+        success: false,
+        message: `Invalid temperature: "${value}". Must be a number between 0 and 1.`
+      };
+    }
+  }
+  const config = loadConfig();
+  switch (configKey) {
+    case "backend":
+      config.backend = value;
+      break;
+    case "model":
+      config.model = value;
+      break;
+    case "ollama_url":
+      config.ollama_url = value;
+      break;
+    case "openai_base_url":
+      config.openai_base_url = value;
+      break;
+    case "temperature":
+      config.temperature = parseFloat(value);
+      break;
+    case "default_scope":
+      config.default_scope = value;
+      break;
+    case "default_type":
+      config.default_type = value;
+      break;
+    case "default_language":
+      config.default_language = value;
+      break;
+  }
+  saveConfig(config);
+  const keyDisplay = key !== configKey ? `${key} (${configKey})` : configKey;
+  return {
+    success: true,
+    message: `Config updated: ${keyDisplay} = "${value}"`
+  };
 }
 
 // src/backends/ollama.ts
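For reference, the global config file that the new saveConfig above writes (and that loadConfig reads back, merging defaults, then global, then local) looks like this with the shipped defaults; the same keys are accepted in a per-repo `.gitcommitai` or `.gitcommitai.toml`:

```toml
# git-commit-ai configuration
# Backend: ollama, llamacpp, openai, anthropic, groq
backend = "ollama"
model = "llama3.1:8b"
ollama_url = "http://localhost:11434"
# OpenAI Base URL - change this to use OpenAI-compatible APIs like llama.cpp
# Example: http://localhost:8080/v1 for llama-server
openai_base_url = "https://api.openai.com/v1"
temperature = 0.7
retry_temperatures = [0.5, 0.3, 0.2]
```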
@@ -120,98 +289,265 @@ var OllamaBackend = class {
   }
 };
 
-// src/
-
-var
-
-
-
+// src/backends/openai.ts
+var OPENAI_DEFAULT_URL = "https://api.openai.com/v1";
+var OpenAIBackend = class _OpenAIBackend {
+  model;
+  apiKey;
+  baseUrl;
+  isLocalServer;
+  constructor(model = "gpt-4o-mini", apiKey, baseUrl) {
+    this.model = model;
+    this.baseUrl = baseUrl ?? process.env.OPENAI_BASE_URL ?? OPENAI_DEFAULT_URL;
+    this.isLocalServer = this.baseUrl.includes("localhost") || this.baseUrl.includes("127.0.0.1");
+    this.apiKey = apiKey ?? process.env.OPENAI_API_KEY ?? (this.isLocalServer ? "no-key-required" : "");
+  }
+  async generate(prompt, temperature = 0.7) {
+    if (!this.apiKey && !this.isLocalServer) {
+      throw new Error("OPENAI_API_KEY environment variable is not set");
+    }
+    const response = await fetch(`${this.baseUrl}/chat/completions`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${this.apiKey}`
+      },
+      body: JSON.stringify({
+        model: this.model,
+        messages: [
+          {
+            role: "user",
+            content: prompt
+          }
+        ],
+        temperature,
+        max_tokens: 256
+      })
+    });
+    if (!response.ok) {
+      const error = await response.text();
+      throw new Error(`OpenAI API error: ${response.status} - ${error}`);
+    }
+    const data = await response.json();
+    return data.choices?.[0]?.message?.content ?? "";
+  }
+  async isAvailable() {
+    if (!this.apiKey && !this.isLocalServer) {
+      return false;
+    }
+    try {
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), 5e3);
+      const headers = {};
+      if (this.apiKey) {
+        headers["Authorization"] = `Bearer ${this.apiKey}`;
+      }
+      const response = await fetch(`${this.baseUrl}/models`, {
+        headers,
+        signal: controller.signal
+      });
+      clearTimeout(timeoutId);
+      return response.ok;
+    } catch {
+      return false;
+    }
+  }
+  /**
+   * Check if OpenAI API key is configured or if a custom base URL is set
+   */
+  static hasApiKey() {
+    return !!process.env.OPENAI_API_KEY;
+  }
+  /**
+   * Check if a custom base URL is configured (for llama.cpp, etc.)
+   */
+  static hasCustomBaseUrl() {
+    return !!process.env.OPENAI_BASE_URL;
+  }
+  /**
+   * Check if this backend can potentially work (has API key or custom URL)
+   */
+  static isConfigured() {
+    return _OpenAIBackend.hasApiKey() || _OpenAIBackend.hasCustomBaseUrl();
   }
 };
-
-
-
-
-
+
+// src/backends/anthropic.ts
+var AnthropicBackend = class {
+  model;
+  apiKey;
+  baseUrl;
+  constructor(model = "claude-3-haiku-20240307", apiKey, baseUrl = "https://api.anthropic.com") {
+    this.model = model;
+    this.apiKey = apiKey ?? process.env.ANTHROPIC_API_KEY ?? "";
+    this.baseUrl = baseUrl;
+  }
+  async generate(prompt, temperature = 0.7) {
+    if (!this.apiKey) {
+      throw new Error("ANTHROPIC_API_KEY environment variable is not set");
+    }
+    const response = await fetch(`${this.baseUrl}/v1/messages`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        "x-api-key": this.apiKey,
+        "anthropic-version": "2023-06-01"
+      },
+      body: JSON.stringify({
+        model: this.model,
+        max_tokens: 256,
+        messages: [
+          {
+            role: "user",
+            content: prompt
+          }
+        ],
+        temperature
+      })
     });
-
-
-
-
-
+    if (!response.ok) {
+      const error = await response.text();
+      throw new Error(`Anthropic API error: ${response.status} - ${error}`);
+    }
+    const data = await response.json();
+    const textBlock = data.content?.find((block) => block.type === "text");
+    return textBlock?.text ?? "";
   }
-
-
-  try {
-    return runGit(...args);
-  } catch {
-    return "";
+  async isAvailable() {
+    return !!this.apiKey;
   }
-
-
-  const diff = runGitSafe("diff", "--cached");
-  const stats = runGitSafe("diff", "--cached", "--stat");
-  const filesOutput = runGitSafe("diff", "--cached", "--name-only");
-  const files = filesOutput.split("\n").filter((f) => f);
-  return {
-    diff,
-    stats,
-    files,
-    isEmpty: !diff.trim()
-  };
-}
-function getFileDiff(filePath) {
-  const diff = runGitSafe("diff", "--cached", "--", filePath);
-  const stats = runGitSafe("diff", "--cached", "--stat", "--", filePath);
-  const files = diff ? [filePath] : [];
-  return {
-    diff,
-    stats,
-    files,
-    isEmpty: !diff.trim()
-  };
-}
-function addFiles(...paths) {
-  if (paths.length === 0) {
-    paths = ["."];
+  static hasApiKey() {
+    return !!process.env.ANTHROPIC_API_KEY;
   }
-
-
-
-
-
-
+};
+
+// src/backends/groq.ts
+var GroqBackend = class {
+  model;
+  apiKey;
+  baseUrl;
+  constructor(model = "llama-3.1-8b-instant", apiKey, baseUrl = "https://api.groq.com/openai/v1") {
+    this.model = model;
+    this.apiKey = apiKey ?? process.env.GROQ_API_KEY ?? "";
+    this.baseUrl = baseUrl;
+  }
+  async generate(prompt, temperature = 0.7) {
+    if (!this.apiKey) {
+      throw new Error("GROQ_API_KEY environment variable is not set");
+    }
+    const response = await fetch(`${this.baseUrl}/chat/completions`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${this.apiKey}`
+      },
+      body: JSON.stringify({
+        model: this.model,
+        messages: [
+          {
+            role: "user",
+            content: prompt
+          }
+        ],
+        temperature,
+        max_tokens: 256
+      })
+    });
+    if (!response.ok) {
+      const error = await response.text();
+      throw new Error(`Groq API error: ${response.status} - ${error}`);
+    }
+    const data = await response.json();
+    return data.choices?.[0]?.message?.content ?? "";
+  }
+  async isAvailable() {
+    if (!this.apiKey) {
+      return false;
+    }
+    try {
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), 5e3);
+      const response = await fetch(`${this.baseUrl}/models`, {
+        headers: {
+          Authorization: `Bearer ${this.apiKey}`
+        },
+        signal: controller.signal
+      });
+      clearTimeout(timeoutId);
+      return response.ok;
     } catch {
       return false;
     }
-
+  }
+  static hasApiKey() {
+    return !!process.env.GROQ_API_KEY;
+  }
+};
+
+// src/backends/index.ts
+var LLAMACPP_DEFAULT_URL = "http://localhost:8080/v1";
+var DEFAULT_MODELS = {
+  ollama: "llama3.1:8b",
+  openai: "gpt-4o-mini",
+  anthropic: "claude-3-haiku-20240307",
+  groq: "llama-3.1-8b-instant",
+  llamacpp: "gpt-4o-mini"
+  // Model alias used by llama-server (--alias flag)
+};
+function createBackend(config) {
+  const model = config.model || DEFAULT_MODELS[config.backend];
+  switch (config.backend) {
+    case "openai":
+      return new OpenAIBackend(model, void 0, config.openai_base_url);
+    case "llamacpp":
+      return new OpenAIBackend(model, void 0, LLAMACPP_DEFAULT_URL);
+    case "anthropic":
+      return new AnthropicBackend(model);
+    case "groq":
+      return new GroqBackend(model);
+    case "ollama":
+    default:
+      return new OllamaBackend(model, config.ollama_url);
   }
 }
-function
-
-
-
-
-const
-
-
-
-
+async function detectBackend() {
+  const ollama = new OllamaBackend();
+  if (await ollama.isAvailable()) {
+    return "ollama";
+  }
+  const llamacpp = new OpenAIBackend(DEFAULT_MODELS.llamacpp, void 0, LLAMACPP_DEFAULT_URL);
+  if (await llamacpp.isAvailable()) {
+    return "llamacpp";
+  }
+  if (OpenAIBackend.hasCustomBaseUrl()) {
+    const localOpenai = new OpenAIBackend();
+    if (await localOpenai.isAvailable()) {
+      return "openai";
+    }
+  }
+  if (GroqBackend.hasApiKey()) {
+    return "groq";
+  }
+  if (OpenAIBackend.hasApiKey()) {
+    return "openai";
+  }
+  if (AnthropicBackend.hasApiKey()) {
+    return "anthropic";
+  }
+  return "ollama";
 }
-function
-const
-
-
-  staged.split("\n").forEach((f) => files.add(f));
+function getAvailableBackends() {
+  const available = ["ollama", "llamacpp"];
+  if (OpenAIBackend.isConfigured()) {
+    available.push("openai");
   }
-
-
-  modified.split("\n").forEach((f) => files.add(f));
+  if (AnthropicBackend.hasApiKey()) {
+    available.push("anthropic");
   }
-
-
-  untracked.split("\n").forEach((f) => files.add(f));
+  if (GroqBackend.hasApiKey()) {
+    available.push("groq");
   }
-  return
+  return available;
 }
 
 // src/prompts.ts
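A minimal sketch of how the pieces above compose, mirroring what the CLI does internally (these identifiers are module-private in the bundle, so this is illustrative rather than a public API):

```js
// Hypothetical: resolve config, auto-detect a backend, generate one completion.
const cfg = loadConfig(); // defaults, then global config, then local overrides
if (cfg.backend === "ollama") {
  // Probe order: ollama, llamacpp, custom OPENAI_BASE_URL, GROQ_API_KEY,
  // OPENAI_API_KEY, ANTHROPIC_API_KEY, then fall back to ollama.
  cfg.backend = await detectBackend();
  cfg.model = DEFAULT_MODELS[cfg.backend];
}
const backend = createBackend(cfg); // unknown backends fall through to OllamaBackend
const text = await backend.generate("<prompt>", cfg.temperature);
```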
@@ -261,6 +597,8 @@ EXAMPLES based on diff content:
 IMPORTANT: Base your message ONLY on the actual changes shown in the diff below.
 Do NOT use the examples above if they don't match the diff content.
 
+{constraints}
+
 {context}
 
 DIFF TO ANALYZE:
@@ -269,6 +607,45 @@ DIFF TO ANALYZE:
 \`\`\`
 
 Reply with ONLY the commit message, nothing else. No quotes, no explanation.`;
+var VALID_TYPES = ["feat", "fix", "docs", "style", "refactor", "test", "chore", "build", "ci", "perf", "revert"];
+function isValidType(type) {
+  return VALID_TYPES.includes(type.toLowerCase());
+}
+function getValidTypes() {
+  return [...VALID_TYPES];
+}
+function buildConstraintsText(constraints) {
+  const parts = [];
+  if (constraints.type) {
+    parts.push(`CONSTRAINT: You MUST use "${constraints.type}" as the commit type.`);
+  }
+  if (constraints.scope) {
+    parts.push(`CONSTRAINT: You MUST use "(${constraints.scope})" as the scope in the commit message.`);
+  }
+  if (constraints.breaking) {
+    parts.push(`CONSTRAINT: This is a BREAKING CHANGE. You MUST use "!" after the type/scope (e.g., "feat!:" or "feat(api)!:").`);
+  }
+  if (constraints.language) {
+    const langMap = {
+      en: "English",
+      pt: "Portuguese",
+      es: "Spanish",
+      fr: "French",
+      de: "German",
+      it: "Italian",
+      ja: "Japanese",
+      zh: "Chinese",
+      ko: "Korean",
+      ru: "Russian"
+    };
+    const langName = langMap[constraints.language.toLowerCase()] || constraints.language;
+    parts.push(`CONSTRAINT: Write the commit message subject in ${langName}.`);
+  }
+  if (constraints.context) {
+    parts.push(`ADDITIONAL CONTEXT: ${constraints.context}`);
+  }
+  return parts.join("\n");
+}
 var KARMA_PATTERN = /^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\([^)]+\))?:\s*.+/;
 var ACTION_TO_TYPE = {
   add: "feat",
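Worked example: with `--type feat --scope api --breaking`, buildConstraintsText above emits exactly these lines into the `{constraints}` slot of the prompt:

```text
CONSTRAINT: You MUST use "feat" as the commit type.
CONSTRAINT: You MUST use "(api)" as the scope in the commit message.
CONSTRAINT: This is a BREAKING CHANGE. You MUST use "!" after the type/scope (e.g., "feat!:" or "feat(api)!:").
```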
@@ -306,9 +683,10 @@ function truncateDiff(diff, maxChars = MAX_DIFF_CHARS) {
   }
   return truncated + "\n\n[... diff truncated for brevity ...]";
 }
-function buildPrompt(diff, context) {
+function buildPrompt(diff, context, constraints) {
   const truncatedDiff = truncateDiff(diff);
-
+  const constraintsText = constraints ? buildConstraintsText(constraints) : "";
+  return KARMA_PROMPT.replace("{diff}", truncatedDiff).replace("{context}", context).replace("{constraints}", constraintsText);
 }
 function buildSummarizePrompt(diff, context) {
   const truncatedDiff = truncateDiff(diff);
@@ -344,9 +722,30 @@ function fixMessage(message) {
   }
   return `chore: ${cleaned.toLowerCase()}`;
 }
+function addIssueReference(message, issue) {
+  const issueRef = issue.startsWith("#") ? issue : `#${issue}`;
+  return `${message}
+
+Refs: ${issueRef}`;
+}
+function addCoAuthors(message, coAuthors) {
+  if (coAuthors.length === 0) {
+    return message;
+  }
+  const trailers = coAuthors.map((author) => `Co-authored-by: ${author}`).join("\n");
+  return `${message}
+
+${trailers}`;
+}
+function ensureBreakingMarker(message) {
+  if (message.includes("!:")) {
+    return message;
+  }
+  return message.replace(/:/, "!:");
+}
 
 // src/hook.ts
-import { execSync
+import { execSync } from "child_process";
 import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, unlinkSync, existsSync as existsSync2, chmodSync, mkdirSync as mkdirSync2 } from "fs";
 import { join as join2, dirname as dirname2 } from "path";
 var HOOK_SCRIPT = `#!/bin/sh
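The trailer helpers above append plain git trailers: issue reference first, co-authors after. For a hypothetical subject line and `--issue 42 --co-author "Jane Doe <jane@example.com>"`, the final message would read:

```text
feat(api): add rate limiting

Refs: #42

Co-authored-by: Jane Doe <jane@example.com>
```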
@@ -389,7 +788,7 @@ exit 0
 var HOOK_NAME = "prepare-commit-msg";
 function getGitDir() {
   try {
-    const result =
+    const result = execSync("git rev-parse --git-dir", {
       encoding: "utf-8",
       stdio: ["pipe", "pipe", "pipe"]
     });
@@ -506,6 +905,32 @@ function debugValidation(message, isValid, fixed) {
 }
 
 // src/cli.ts
+function buildDiffContext(diffResult) {
+  const parts = [];
+  if (diffResult.filesAdded.length > 0) {
+    parts.push(`Files added:
+${diffResult.filesAdded.slice(0, 5).join("\n")}`);
+    if (diffResult.filesAdded.length > 5) {
+      parts.push(` ... and ${diffResult.filesAdded.length - 5} more added`);
+    }
+  }
+  if (diffResult.filesDeleted.length > 0) {
+    parts.push(`Files deleted:
+${diffResult.filesDeleted.slice(0, 5).join("\n")}`);
+    if (diffResult.filesDeleted.length > 5) {
+      parts.push(` ... and ${diffResult.filesDeleted.length - 5} more deleted`);
+    }
+  }
+  if (diffResult.filesModified.length > 0) {
+    parts.push(`Files modified:
+${diffResult.filesModified.slice(0, 5).join("\n")}`);
+    if (diffResult.filesModified.length > 5) {
+      parts.push(` ... and ${diffResult.filesModified.length - 5} more modified`);
+    }
+  }
+  parts.push(`Stats: ${diffResult.stats}`);
+  return parts.join("\n");
+}
 async function promptUser(question, choices) {
   const rl = createInterface({
     input: process.stdin,
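For a hypothetical staged change (one file added, two modified), buildDiffContext above produces context text of this shape (the stats string comes from `git diff --stat`):

```text
Files added:
src/backends/groq.ts
Files modified:
src/cli.ts
src/prompts.ts
Stats: 3 files changed, 120 insertions(+), 8 deletions(-)
```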
@@ -536,15 +961,18 @@ async function promptEdit(currentMessage) {
     });
   });
 }
-async function generateMessage(backend, diffContent, context, temperatures) {
-  const prompt = buildPrompt(diffContent, context);
+async function generateMessage(backend, diffContent, context, temperatures, constraints) {
+  const prompt = buildPrompt(diffContent, context, constraints);
   debugPrompt(prompt);
   for (const temp of temperatures) {
     debug(`Trying temperature: ${temp}`);
     try {
       const rawMessage = await backend.generate(prompt, temp);
       debugResponse(rawMessage);
-
+      let message = cleanMessage(rawMessage);
+      if (constraints?.breaking) {
+        message = ensureBreakingMarker(message);
+      }
       const isValid = validateMessage(message);
       debugValidation(message, isValid);
       if (isValid) {
@@ -553,7 +981,7 @@ async function generateMessage(backend, diffContent, context, temperatures) {
       const fixed = fixMessage(message);
       if (validateMessage(fixed)) {
         debugValidation(fixed, true, fixed);
-        return fixed;
+        return constraints?.breaking ? ensureBreakingMarker(fixed) : fixed;
       }
     } catch (e) {
       const error = e;
@@ -579,13 +1007,13 @@ async function promptAction(message) {
     ["c", "e", "r", "a"]
   );
 }
-async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm) {
+async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm, constraints) {
   const temperatures = [cfg.temperature, ...cfg.retry_temperatures];
   const spinner = ora("Generating commit message...").start();
   while (true) {
     let message;
     try {
-      message = await generateMessage(backend, diffContent, context, temperatures);
+      message = await generateMessage(backend, diffContent, context, temperatures, constraints);
     } finally {
       spinner.stop();
     }
@@ -611,25 +1039,58 @@ async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm) {
     }
   }
 }
-async function handleSingleCommit(backend, cfg,
-
-  if (
-
-
+async function handleSingleCommit(backend, cfg, options) {
+  let diffResult;
+  if (options.amend) {
+    diffResult = getLastCommitDiff();
+    if (diffResult.isEmpty) {
+      console.log(chalk2.yellow("No previous commit to amend."));
+      process.exit(1);
+    }
+    console.log(chalk2.dim("Amending last commit..."));
+  } else {
+    diffResult = getStagedDiff();
+    if (diffResult.isEmpty) {
+      console.log(chalk2.yellow("No changes to commit."));
+      process.exit(0);
+    }
+  }
+  let diff = diffResult.diff;
+  if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+    diff = filterDiffByPatterns(diff, cfg.ignore_patterns);
+    if (!diff.trim()) {
+      console.log(chalk2.yellow("All changes are ignored by ignore_patterns."));
+      process.exit(0);
+    }
   }
-  debugDiff(
-  const context =
-
-Stats: ${diffResult.stats}`;
-  const message = await runCommitFlow(backend, cfg, diffResult.diff, context, skipConfirm);
+  debugDiff(diff, diffResult.files);
+  const context = buildDiffContext(diffResult);
+  let message = await runCommitFlow(backend, cfg, diff, context, options.skipConfirm, options.constraints);
   if (message === null) {
     console.log(chalk2.yellow("Aborted."));
     process.exit(0);
   }
+  if (options.issue) {
+    message = addIssueReference(message, options.issue);
+  }
+  if (options.coAuthors && options.coAuthors.length > 0) {
+    message = addCoAuthors(message, options.coAuthors);
+  }
+  if (options.dryRun) {
+    console.log(chalk2.cyan("Dry run - message not committed:"));
+    console.log(message);
+    return;
+  }
   try {
-
-
-
+    if (options.amend) {
+      commitAmend(message);
+      debug(`Amend successful: ${message}`);
+      console.log(chalk2.green("\u2713 Amended:"), message.split("\n")[0]);
+    } else {
+      commit(message);
+      debug(`Commit successful: ${message}`);
+      console.log(chalk2.green("\u2713 Committed:"), message.split("\n")[0]);
+    }
   } catch (e) {
     const error = e;
     debug(`Commit failed: ${error.message}`);
@@ -637,14 +1098,23 @@ Stats: ${diffResult.stats}`;
     process.exit(1);
   }
 }
-async function handleIndividualCommits(backend, cfg,
-  const
-  if (
-    console.log(chalk2.yellow("No files to commit."));
+async function handleIndividualCommits(backend, cfg, options) {
+  const stagedFiles = getStagedFiles();
+  if (stagedFiles.length === 0) {
+    console.log(chalk2.yellow("No staged files to commit."));
+    console.log(chalk2.dim("Stage files with: git add <files>"));
     process.exit(0);
   }
-  console.log(chalk2.dim(`Found ${
-
+  console.log(chalk2.dim(`Found ${stagedFiles.length} files to commit individually.`));
+  resetStaged();
+  for (const filePath of stagedFiles) {
+    if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+      const { shouldIgnoreFile } = await import("./git-F4ZHBA3B.js");
+      if (shouldIgnoreFile(filePath, cfg.ignore_patterns)) {
+        console.log(chalk2.dim(`Skipping ignored file: ${filePath}`));
+        continue;
+      }
+    }
     const added = addFiles(filePath);
     if (!added) {
       continue;
@@ -655,16 +1125,26 @@ async function handleIndividualCommits(backend, cfg, skipConfirm) {
     }
     console.log(chalk2.bold(`
 Processing: ${filePath}`));
-    const context =
-
-    const message = await runCommitFlow(backend, cfg, diffResult.diff, context, skipConfirm);
+    const context = buildDiffContext(diffResult);
+    let message = await runCommitFlow(backend, cfg, diffResult.diff, context, options.skipConfirm, options.constraints);
     if (message === null) {
       console.log(chalk2.yellow(`Skipped: ${filePath}`));
       continue;
     }
+    if (options.issue) {
+      message = addIssueReference(message, options.issue);
+    }
+    if (options.coAuthors && options.coAuthors.length > 0) {
+      message = addCoAuthors(message, options.coAuthors);
+    }
+    if (options.dryRun) {
+      console.log(chalk2.cyan(`Dry run - ${filePath}:`));
+      console.log(message);
+      continue;
+    }
     try {
       commit(message);
-      console.log(chalk2.green("\u2713 Committed:"), message);
+      console.log(chalk2.green("\u2713 Committed:"), message.split("\n")[0]);
     } catch (e) {
       const error = e;
       console.log(chalk2.red(`Error committing ${filePath}: ${error.message}`));
@@ -673,46 +1153,124 @@ Stats: ${diffResult.stats}`;
 }
 function createProgram() {
   const program2 = new Command();
-  program2.name("git-commit-ai").description("Generate commit messages using
+  program2.name("git-commit-ai").description("Generate commit messages using LLMs (Ollama, OpenAI, Anthropic, Groq, llama.cpp)").version("0.3.0").option("-p, --push", "Push after commit").option("-y, --yes", "Skip confirmation").option("-i, --individual", "Commit files individually").option("-d, --debug", "Enable debug output").option("--dry-run", "Show generated message without committing").option("-b, --backend <backend>", "Backend to use (ollama, openai, anthropic, groq, llamacpp)").option("-m, --model <model>", "Override model from config").option("-t, --temperature <temp>", "Override temperature (0.0-1.0)", parseFloat).option("--hook-mode", "Called by git hook (outputs message only)").option("--amend", "Regenerate and amend the last commit message").option("-s, --scope <scope>", "Force a specific scope (e.g., auth, api)").option("--type <type>", `Force commit type (${getValidTypes().join(", ")})`).option("-c, --context <text>", "Provide additional context for message generation").option("-l, --lang <code>", "Language for commit message (en, pt, es, fr, de, etc.)").option("--issue <ref>", "Reference an issue (e.g., 123 or #123)").option("--breaking", "Mark as breaking change (adds ! to type)").option("--co-author <author>", "Add co-author (can be used multiple times)", (val, prev) => prev.concat([val]), []).action(async (options) => {
     if (options.debug) {
       enableDebug();
       debug("Debug mode enabled");
     }
     const cfg = loadConfig();
+    if (options.type && !isValidType(options.type)) {
+      console.log(chalk2.red(`Error: Invalid commit type "${options.type}"`));
+      console.log(chalk2.dim(`Valid types: ${getValidTypes().join(", ")}`));
+      process.exit(1);
+    }
+    if (options.backend) {
+      const validBackends = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+      if (validBackends.includes(options.backend)) {
+        cfg.backend = options.backend;
+        if (!options.model && cfg.model === "llama3.1:8b") {
+          cfg.model = DEFAULT_MODELS[cfg.backend];
+        }
+        debug(`Backend overridden to: ${cfg.backend}`);
+      } else {
+        console.log(chalk2.red(`Error: Invalid backend "${options.backend}"`));
+        console.log(chalk2.dim(`Valid backends: ${validBackends.join(", ")}`));
+        process.exit(1);
+      }
+    }
+    if (options.model) {
+      cfg.model = options.model;
+      debug(`Model overridden to: ${cfg.model}`);
+    }
+    if (options.temperature !== void 0 && !isNaN(options.temperature)) {
+      cfg.temperature = options.temperature;
+      debug(`Temperature overridden to: ${cfg.temperature}`);
+    }
     debugConfig(cfg);
-
+    if (!options.backend && cfg.backend === "ollama") {
+      const detected = await detectBackend();
+      if (detected !== "ollama") {
+        cfg.backend = detected;
+        cfg.model = DEFAULT_MODELS[detected];
+        debug(`Auto-detected backend: ${detected}`);
+      }
+    }
+    const backend = createBackend(cfg);
+    debug(`Using backend: ${cfg.backend} with model: ${cfg.model}`);
     const available = await backend.isAvailable();
     if (!available) {
       if (options.hookMode) {
         process.exit(1);
       }
-
-
+      if (cfg.backend === "ollama") {
+        console.log(chalk2.red("Error: Ollama is not running."));
+        console.log(chalk2.dim("Start it with: brew services start ollama"));
+      } else if (cfg.backend === "llamacpp") {
+        console.log(chalk2.red("Error: llama.cpp server is not running."));
+        console.log(chalk2.dim("Start it with: llama-server -m model.gguf --port 8080"));
+      } else {
+        console.log(chalk2.red(`Error: ${cfg.backend} backend is not available.`));
+        const envVar = {
+          openai: "OPENAI_API_KEY",
+          anthropic: "ANTHROPIC_API_KEY",
+          groq: "GROQ_API_KEY"
+        }[cfg.backend];
+        if (envVar) {
+          console.log(chalk2.dim(`Set ${envVar} environment variable.`));
+        }
+      }
+      const availableBackends = getAvailableBackends();
+      if (availableBackends.length > 1) {
+        console.log(chalk2.dim(`Available backends: ${availableBackends.join(", ")}`));
+      }
       process.exit(1);
     }
+    const constraints = {
+      type: options.type || cfg.default_type,
+      scope: options.scope || cfg.default_scope,
+      language: options.lang || cfg.default_language,
+      breaking: options.breaking,
+      context: options.context
+    };
     if (options.hookMode) {
       const diffResult = getStagedDiff();
       if (diffResult.isEmpty) {
         process.exit(1);
       }
-
-
-
+      let diff = diffResult.diff;
+      if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+        diff = filterDiffByPatterns(diff, cfg.ignore_patterns);
+      }
+      const context = buildDiffContext(diffResult);
       const temperatures = [cfg.temperature, ...cfg.retry_temperatures];
-      const message = await generateMessage(backend,
+      const message = await generateMessage(backend, diff, context, temperatures, constraints);
       if (message) {
         console.log(message);
         process.exit(0);
       }
       process.exit(1);
     }
-
+    const commitOptions = {
+      skipConfirm: options.yes,
+      dryRun: options.dryRun,
+      amend: options.amend,
+      constraints,
+      issue: options.issue,
+      coAuthors: options.coAuthor
+    };
+    if (!options.amend) {
+      addFiles(".");
+    }
     if (options.individual) {
-
+      if (options.amend) {
+        console.log(chalk2.red("Error: --amend cannot be used with --individual"));
+        process.exit(1);
+      }
+      await handleIndividualCommits(backend, cfg, commitOptions);
     } else {
-      await handleSingleCommit(backend, cfg,
+      await handleSingleCommit(backend, cfg, commitOptions);
     }
-    if (options.push) {
+    if (options.push && !options.dryRun && !options.amend) {
       try {
         push();
         console.log(chalk2.green("\u2713 Changes pushed to remote."));
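Taken together, the options registered above support invocations like these (illustrative values):

```sh
# Force type and scope, mark breaking, reference an issue, preview without committing:
git-commit-ai --type feat --scope api --breaking --issue 123 --dry-run

# Portuguese message via the Groq backend, with a co-author, then push:
git-commit-ai -l pt -b groq --co-author "Jane Doe <jane@example.com>" -p
```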
@@ -723,7 +1281,41 @@ Stats: ${diffResult.stats}`;
       }
     }
   });
-  program2.command("config").description("Show or edit configuration").option("-e, --edit", "Create/edit configuration file").action((options) => {
+  program2.command("config").description("Show or edit configuration").option("-e, --edit", "Create/edit configuration file").option("-s, --set <key=value>", "Set a config value (e.g., --set backend=llamacpp)").option("-l, --list-keys", "List all valid config keys").action((options) => {
+    if (options.listKeys) {
+      console.log(chalk2.bold("Valid config keys:"));
+      for (const key of VALID_CONFIG_KEYS) {
+        const alias = Object.entries(CONFIG_ALIASES).find(([, v]) => v === key)?.[0];
+        if (alias) {
+          console.log(`  ${key} ${chalk2.dim(`(alias: ${alias})`)}`);
+        } else {
+          console.log(`  ${key}`);
+        }
+      }
+      console.log();
+      console.log(chalk2.bold("Short aliases:"));
+      for (const [alias, fullKey] of Object.entries(CONFIG_ALIASES)) {
+        console.log(`  ${alias} \u2192 ${fullKey}`);
+      }
+      return;
+    }
+    if (options.set) {
+      const match = options.set.match(/^([^=]+)=(.*)$/);
+      if (!match) {
+        console.log(chalk2.red("Error: Invalid format. Use: --set key=value"));
+        console.log(chalk2.dim("Example: git-commit-ai config --set backend=llamacpp"));
+        process.exit(1);
+      }
+      const [, key, value] = match;
+      const result = updateConfig(key, value);
+      if (result.success) {
+        console.log(chalk2.green(`\u2713 ${result.message}`));
+      } else {
+        console.log(chalk2.red(`Error: ${result.message}`));
+        process.exit(1);
+      }
+      return;
+    }
     const cfg = loadConfig();
     if (options.edit) {
       console.log(chalk2.dim("Creating default config file..."));
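Usage of the new config flags (the `--set` example below is the one the command's own error text suggests; `temp` resolves to `temperature` through CONFIG_ALIASES):

```sh
git-commit-ai config --list-keys
git-commit-ai config --set backend=llamacpp
git-commit-ai config --set temp=0.4
```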
@@ -734,16 +1326,34 @@ Stats: ${diffResult.stats}`;
       console.log(showConfig(cfg));
     }
   });
-  program2.command("summarize").description("Summarize staged changes in plain English").option("--diff", "Also show the raw diff").option("-d, --debug", "Enable debug output").action(async (options) => {
+  program2.command("summarize").description("Summarize staged changes in plain English").option("--diff", "Also show the raw diff").option("-b, --backend <backend>", "Backend to use (ollama, openai, anthropic, groq, llamacpp)").option("-d, --debug", "Enable debug output").action(async (options) => {
     if (options.debug) {
       enableDebug();
     }
     const cfg = loadConfig();
-
+    if (options.backend) {
+      const validBackends = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+      if (validBackends.includes(options.backend)) {
+        cfg.backend = options.backend;
+        cfg.model = DEFAULT_MODELS[cfg.backend];
+      }
+    }
+    if (cfg.backend === "ollama") {
+      const detected = await detectBackend();
+      if (detected !== "ollama") {
+        cfg.backend = detected;
+        cfg.model = DEFAULT_MODELS[detected];
+      }
+    }
+    const backend = createBackend(cfg);
     const available = await backend.isAvailable();
     if (!available) {
-
-
+      if (cfg.backend === "ollama") {
+        console.log(chalk2.red("Error: Ollama is not running."));
+        console.log(chalk2.dim("Start it with: brew services start ollama"));
+      } else {
+        console.log(chalk2.red(`Error: ${cfg.backend} backend is not available.`));
+      }
       process.exit(1);
     }
     const diffResult = getStagedDiff();
@@ -761,8 +1371,7 @@ Files to summarize: ${diffResult.files.length}`));
     if (diffResult.files.length > 10) {
       console.log(` ... and ${diffResult.files.length - 10} more`);
     }
-    const context =
-Stats: ${diffResult.stats}`;
+    const context = buildDiffContext(diffResult);
     const prompt = buildSummarizePrompt(diffResult.diff, context);
     debugPrompt(prompt);
     const spinner = ora("Generating summary...").start();