@vavasilva/git-commit-ai 0.2.3 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,16 @@
  #!/usr/bin/env node
+ import {
+ addFiles,
+ commit,
+ commitAmend,
+ filterDiffByPatterns,
+ getFileDiff,
+ getLastCommitDiff,
+ getStagedDiff,
+ getStagedFiles,
+ push,
+ resetStaged
+ } from "./chunk-5MPJCPJ4.js";

  // src/cli.ts
  import { Command } from "commander";
@@ -12,53 +24,210 @@ import { homedir } from "os";
  import { join, dirname } from "path";
  import { parse as parseToml } from "smol-toml";
  var DEFAULT_CONFIG = {
+ backend: "ollama",
  model: "llama3.1:8b",
  ollama_url: "http://localhost:11434",
+ openai_base_url: "https://api.openai.com/v1",
  temperature: 0.7,
- retry_temperatures: [0.5, 0.3, 0.2]
+ retry_temperatures: [0.5, 0.3, 0.2],
+ ignore_patterns: []
+ };
+ var VALID_BACKENDS = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+ var LOCAL_CONFIG_NAMES = [".gitcommitai", ".gitcommitai.toml"];
+ var VALID_CONFIG_KEYS = [
+ "backend",
+ "model",
+ "ollama_url",
+ "openai_base_url",
+ "temperature",
+ "default_scope",
+ "default_type",
+ "default_language"
+ ];
+ var CONFIG_ALIASES = {
+ lang: "default_language",
+ scope: "default_scope",
+ type: "default_type",
+ url: "ollama_url",
+ temp: "temperature"
  };
  function getConfigPath() {
  return join(homedir(), ".config", "git-commit-ai", "config.toml");
  }
- function loadConfig() {
- const configPath = getConfigPath();
- if (!existsSync(configPath)) {
- return { ...DEFAULT_CONFIG };
+ function getLocalConfigPath() {
+ for (const name of LOCAL_CONFIG_NAMES) {
+ if (existsSync(name)) {
+ return name;
+ }
  }
+ return null;
+ }
+ function parseConfigFile(path) {
  try {
- const content = readFileSync(configPath, "utf-8");
- const data = parseToml(content);
- return {
- model: data.model ?? DEFAULT_CONFIG.model,
- ollama_url: data.ollama_url ?? DEFAULT_CONFIG.ollama_url,
- temperature: data.temperature ?? DEFAULT_CONFIG.temperature,
- retry_temperatures: data.retry_temperatures ?? DEFAULT_CONFIG.retry_temperatures
- };
+ const content = readFileSync(path, "utf-8");
+ return parseToml(content);
  } catch {
- return { ...DEFAULT_CONFIG };
+ return null;
  }
  }
+ function mergeConfigs(base, override) {
+ return {
+ backend: VALID_BACKENDS.includes(override.backend) ? override.backend : base.backend,
+ model: override.model ?? base.model,
+ ollama_url: override.ollama_url ?? base.ollama_url,
+ openai_base_url: override.openai_base_url ?? base.openai_base_url,
+ temperature: override.temperature ?? base.temperature,
+ retry_temperatures: override.retry_temperatures ?? base.retry_temperatures,
+ ignore_patterns: override.ignore_patterns ?? base.ignore_patterns,
+ default_scope: override.default_scope ?? base.default_scope,
+ default_type: override.default_type ?? base.default_type,
+ default_language: override.default_language ?? base.default_language
+ };
+ }
+ function loadConfig() {
+ let config = { ...DEFAULT_CONFIG };
+ const globalPath = getConfigPath();
+ if (existsSync(globalPath)) {
+ const globalData = parseConfigFile(globalPath);
+ if (globalData) {
+ config = mergeConfigs(config, globalData);
+ }
+ }
+ const localPath = getLocalConfigPath();
+ if (localPath) {
+ const localData = parseConfigFile(localPath);
+ if (localData) {
+ config = mergeConfigs(config, localData);
+ }
+ }
+ return config;
+ }
  function saveConfig(config) {
  const configPath = getConfigPath();
  const dir = dirname(configPath);
  if (!existsSync(dir)) {
  mkdirSync(dir, { recursive: true });
  }
- const content = `# git-commit-ai configuration
+ let content = `# git-commit-ai configuration
+ # Backend: ollama, llamacpp, openai, anthropic, groq
+ backend = "${config.backend}"
  model = "${config.model}"
  ollama_url = "${config.ollama_url}"
+ # OpenAI Base URL - change this to use OpenAI-compatible APIs like llama.cpp
+ # Example: http://localhost:8080/v1 for llama-server
+ openai_base_url = "${config.openai_base_url}"
  temperature = ${config.temperature}
  retry_temperatures = [${config.retry_temperatures.join(", ")}]
  `;
+ if (config.default_language) {
+ content += `default_language = "${config.default_language}"
+ `;
+ }
+ if (config.default_scope) {
+ content += `default_scope = "${config.default_scope}"
+ `;
+ }
+ if (config.default_type) {
+ content += `default_type = "${config.default_type}"
+ `;
+ }
+ if (config.ignore_patterns && config.ignore_patterns.length > 0) {
+ content += `ignore_patterns = [${config.ignore_patterns.map((p) => `"${p}"`).join(", ")}]
+ `;
+ }
  writeFileSync(configPath, content, "utf-8");
  }
  function showConfig(config) {
- return `Configuration:
+ const localPath = getLocalConfigPath();
+ let output = `Configuration:
+ Backend: ${config.backend}
  Model: ${config.model}
  Ollama URL: ${config.ollama_url}
+ OpenAI Base URL: ${config.openai_base_url}
  Temperature: ${config.temperature}
- Retry temperatures: [${config.retry_temperatures.join(", ")}]
- Config file: ${getConfigPath()}`;
+ Retry temperatures: [${config.retry_temperatures.join(", ")}]`;
+ if (config.ignore_patterns && config.ignore_patterns.length > 0) {
+ output += `
+ Ignore patterns: [${config.ignore_patterns.join(", ")}]`;
+ }
+ if (config.default_scope) {
+ output += `
+ Default scope: ${config.default_scope}`;
+ }
+ if (config.default_type) {
+ output += `
+ Default type: ${config.default_type}`;
+ }
+ if (config.default_language) {
+ output += `
+ Default language: ${config.default_language}`;
+ }
+ output += `
+ Global config: ${getConfigPath()}`;
+ if (localPath) {
+ output += `
+ Local config: ${localPath}`;
+ }
+ return output;
+ }
+ function updateConfig(key, value) {
+ const resolvedKey = CONFIG_ALIASES[key] || key;
+ if (!VALID_CONFIG_KEYS.includes(resolvedKey)) {
+ const aliasHelp = Object.entries(CONFIG_ALIASES).map(([alias, full]) => `${alias} \u2192 ${full}`).join(", ");
+ return {
+ success: false,
+ message: `Invalid config key: "${key}". Valid keys: ${VALID_CONFIG_KEYS.join(", ")}. Aliases: ${aliasHelp}`
+ };
+ }
+ const configKey = resolvedKey;
+ if (configKey === "backend" && !VALID_BACKENDS.includes(value)) {
+ return {
+ success: false,
+ message: `Invalid backend: "${value}". Valid backends: ${VALID_BACKENDS.join(", ")}`
+ };
+ }
+ if (configKey === "temperature") {
+ const temp = parseFloat(value);
+ if (isNaN(temp) || temp < 0 || temp > 1) {
+ return {
+ success: false,
+ message: `Invalid temperature: "${value}". Must be a number between 0 and 1.`
+ };
+ }
+ }
+ const config = loadConfig();
+ switch (configKey) {
+ case "backend":
+ config.backend = value;
+ break;
+ case "model":
+ config.model = value;
+ break;
+ case "ollama_url":
+ config.ollama_url = value;
+ break;
+ case "openai_base_url":
+ config.openai_base_url = value;
+ break;
+ case "temperature":
+ config.temperature = parseFloat(value);
+ break;
+ case "default_scope":
+ config.default_scope = value;
+ break;
+ case "default_type":
+ config.default_type = value;
+ break;
+ case "default_language":
+ config.default_language = value;
+ break;
+ }
+ saveConfig(config);
+ const keyDisplay = key !== configKey ? `${key} (${configKey})` : configKey;
+ return {
+ success: true,
+ message: `Config updated: ${keyDisplay} = "${value}"`
+ };
  }

  // src/backends/ollama.ts
@@ -120,89 +289,265 @@ var OllamaBackend = class {
  }
  };

- // src/git.ts
- import { execSync } from "child_process";
- var GitError = class extends Error {
- constructor(message) {
- super(message);
- this.name = "GitError";
+ // src/backends/openai.ts
+ var OPENAI_DEFAULT_URL = "https://api.openai.com/v1";
+ var OpenAIBackend = class _OpenAIBackend {
+ model;
+ apiKey;
+ baseUrl;
+ isLocalServer;
+ constructor(model = "gpt-4o-mini", apiKey, baseUrl) {
+ this.model = model;
+ this.baseUrl = baseUrl ?? process.env.OPENAI_BASE_URL ?? OPENAI_DEFAULT_URL;
+ this.isLocalServer = this.baseUrl.includes("localhost") || this.baseUrl.includes("127.0.0.1");
+ this.apiKey = apiKey ?? process.env.OPENAI_API_KEY ?? (this.isLocalServer ? "no-key-required" : "");
+ }
+ async generate(prompt, temperature = 0.7) {
+ if (!this.apiKey && !this.isLocalServer) {
+ throw new Error("OPENAI_API_KEY environment variable is not set");
+ }
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${this.apiKey}`
+ },
+ body: JSON.stringify({
+ model: this.model,
+ messages: [
+ {
+ role: "user",
+ content: prompt
+ }
+ ],
+ temperature,
+ max_tokens: 256
+ })
+ });
+ if (!response.ok) {
+ const error = await response.text();
+ throw new Error(`OpenAI API error: ${response.status} - ${error}`);
+ }
+ const data = await response.json();
+ return data.choices?.[0]?.message?.content ?? "";
+ }
+ async isAvailable() {
+ if (!this.apiKey && !this.isLocalServer) {
+ return false;
+ }
+ try {
+ const controller = new AbortController();
+ const timeoutId = setTimeout(() => controller.abort(), 5e3);
+ const headers = {};
+ if (this.apiKey) {
+ headers["Authorization"] = `Bearer ${this.apiKey}`;
+ }
+ const response = await fetch(`${this.baseUrl}/models`, {
+ headers,
+ signal: controller.signal
+ });
+ clearTimeout(timeoutId);
+ return response.ok;
+ } catch {
+ return false;
+ }
+ }
+ /**
+ * Check if OpenAI API key is configured or if a custom base URL is set
+ */
+ static hasApiKey() {
+ return !!process.env.OPENAI_API_KEY;
+ }
+ /**
+ * Check if a custom base URL is configured (for llama.cpp, etc.)
+ */
+ static hasCustomBaseUrl() {
+ return !!process.env.OPENAI_BASE_URL;
+ }
+ /**
+ * Check if this backend can potentially work (has API key or custom URL)
+ */
+ static isConfigured() {
+ return _OpenAIBackend.hasApiKey() || _OpenAIBackend.hasCustomBaseUrl();
  }
  };
- function runGit(...args) {
- try {
- const result = execSync(["git", ...args].join(" "), {
- encoding: "utf-8",
- stdio: ["pipe", "pipe", "pipe"]
+
+ // src/backends/anthropic.ts
+ var AnthropicBackend = class {
+ model;
+ apiKey;
+ baseUrl;
+ constructor(model = "claude-3-haiku-20240307", apiKey, baseUrl = "https://api.anthropic.com") {
+ this.model = model;
+ this.apiKey = apiKey ?? process.env.ANTHROPIC_API_KEY ?? "";
+ this.baseUrl = baseUrl;
+ }
+ async generate(prompt, temperature = 0.7) {
+ if (!this.apiKey) {
+ throw new Error("ANTHROPIC_API_KEY environment variable is not set");
+ }
+ const response = await fetch(`${this.baseUrl}/v1/messages`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "x-api-key": this.apiKey,
+ "anthropic-version": "2023-06-01"
+ },
+ body: JSON.stringify({
+ model: this.model,
+ max_tokens: 256,
+ messages: [
+ {
+ role: "user",
+ content: prompt
+ }
+ ],
+ temperature
+ })
  });
- return result.trim();
- } catch (error) {
- const err = error;
- const message = err.stderr?.trim() || err.message;
- throw new GitError(`Git command failed: ${message}`);
+ if (!response.ok) {
+ const error = await response.text();
+ throw new Error(`Anthropic API error: ${response.status} - ${error}`);
+ }
+ const data = await response.json();
+ const textBlock = data.content?.find((block) => block.type === "text");
+ return textBlock?.text ?? "";
  }
- }
- function runGitSafe(...args) {
- try {
- return runGit(...args);
- } catch {
- return "";
+ async isAvailable() {
+ return !!this.apiKey;
  }
- }
- function getStagedDiff() {
- const diff = runGitSafe("diff", "--cached");
- const stats = runGitSafe("diff", "--cached", "--stat");
- const filesOutput = runGitSafe("diff", "--cached", "--name-only");
- const files = filesOutput.split("\n").filter((f) => f);
- return {
- diff,
- stats,
- files,
- isEmpty: !diff.trim()
- };
- }
- function getFileDiff(filePath) {
- const diff = runGitSafe("diff", "--cached", "--", filePath);
- const stats = runGitSafe("diff", "--cached", "--stat", "--", filePath);
- const files = diff ? [filePath] : [];
- return {
- diff,
- stats,
- files,
- isEmpty: !diff.trim()
- };
- }
- function addFiles(...paths) {
- if (paths.length === 0) {
- paths = ["."];
+ static hasApiKey() {
+ return !!process.env.ANTHROPIC_API_KEY;
  }
- try {
- runGit("add", ...paths);
- return true;
- } catch (error) {
- const err = error;
- if (err.message.includes("ignored by one of your .gitignore") || err.message.includes("pathspec") && err.message.includes("did not match")) {
+ };
+
+ // src/backends/groq.ts
+ var GroqBackend = class {
+ model;
+ apiKey;
+ baseUrl;
+ constructor(model = "llama-3.1-8b-instant", apiKey, baseUrl = "https://api.groq.com/openai/v1") {
+ this.model = model;
+ this.apiKey = apiKey ?? process.env.GROQ_API_KEY ?? "";
+ this.baseUrl = baseUrl;
+ }
+ async generate(prompt, temperature = 0.7) {
+ if (!this.apiKey) {
+ throw new Error("GROQ_API_KEY environment variable is not set");
+ }
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${this.apiKey}`
+ },
+ body: JSON.stringify({
+ model: this.model,
+ messages: [
+ {
+ role: "user",
+ content: prompt
+ }
+ ],
+ temperature,
+ max_tokens: 256
+ })
+ });
+ if (!response.ok) {
+ const error = await response.text();
+ throw new Error(`Groq API error: ${response.status} - ${error}`);
+ }
+ const data = await response.json();
+ return data.choices?.[0]?.message?.content ?? "";
+ }
+ async isAvailable() {
+ if (!this.apiKey) {
+ return false;
+ }
+ try {
+ const controller = new AbortController();
+ const timeoutId = setTimeout(() => controller.abort(), 5e3);
+ const response = await fetch(`${this.baseUrl}/models`, {
+ headers: {
+ Authorization: `Bearer ${this.apiKey}`
+ },
+ signal: controller.signal
+ });
+ clearTimeout(timeoutId);
+ return response.ok;
+ } catch {
  return false;
  }
- throw error;
+ }
+ static hasApiKey() {
+ return !!process.env.GROQ_API_KEY;
+ }
+ };
+
+ // src/backends/index.ts
+ var LLAMACPP_DEFAULT_URL = "http://localhost:8080/v1";
+ var DEFAULT_MODELS = {
+ ollama: "llama3.1:8b",
+ openai: "gpt-4o-mini",
+ anthropic: "claude-3-haiku-20240307",
+ groq: "llama-3.1-8b-instant",
+ llamacpp: "gpt-4o-mini"
+ // Model alias used by llama-server (--alias flag)
+ };
+ function createBackend(config) {
+ const model = config.model || DEFAULT_MODELS[config.backend];
+ switch (config.backend) {
+ case "openai":
+ return new OpenAIBackend(model, void 0, config.openai_base_url);
+ case "llamacpp":
+ return new OpenAIBackend(model, void 0, LLAMACPP_DEFAULT_URL);
+ case "anthropic":
+ return new AnthropicBackend(model);
+ case "groq":
+ return new GroqBackend(model);
+ case "ollama":
+ default:
+ return new OllamaBackend(model, config.ollama_url);
  }
  }
- function commit(message) {
- runGit("commit", "-m", `"${message.replace(/"/g, '\\"')}"`);
- return runGit("rev-parse", "HEAD");
- }
- function push() {
- const branch = getCurrentBranch();
- runGit("push", "origin", branch);
- }
- function getCurrentBranch() {
- return runGit("rev-parse", "--abbrev-ref", "HEAD");
- }
- function getStagedFiles() {
- const output = runGitSafe("diff", "--cached", "--name-only");
- return output.split("\n").filter((f) => f);
+ async function detectBackend() {
+ const ollama = new OllamaBackend();
+ if (await ollama.isAvailable()) {
+ return "ollama";
+ }
+ const llamacpp = new OpenAIBackend(DEFAULT_MODELS.llamacpp, void 0, LLAMACPP_DEFAULT_URL);
+ if (await llamacpp.isAvailable()) {
+ return "llamacpp";
+ }
+ if (OpenAIBackend.hasCustomBaseUrl()) {
+ const localOpenai = new OpenAIBackend();
+ if (await localOpenai.isAvailable()) {
+ return "openai";
+ }
+ }
+ if (GroqBackend.hasApiKey()) {
+ return "groq";
+ }
+ if (OpenAIBackend.hasApiKey()) {
+ return "openai";
+ }
+ if (AnthropicBackend.hasApiKey()) {
+ return "anthropic";
+ }
+ return "ollama";
  }
- function resetStaged() {
- runGitSafe("reset", "HEAD");
+ function getAvailableBackends() {
+ const available = ["ollama", "llamacpp"];
+ if (OpenAIBackend.isConfigured()) {
+ available.push("openai");
+ }
+ if (AnthropicBackend.hasApiKey()) {
+ available.push("anthropic");
+ }
+ if (GroqBackend.hasApiKey()) {
+ available.push("groq");
+ }
+ return available;
  }

  // src/prompts.ts
@@ -252,6 +597,8 @@ EXAMPLES based on diff content:
  IMPORTANT: Base your message ONLY on the actual changes shown in the diff below.
  Do NOT use the examples above if they don't match the diff content.

+ {constraints}
+
  {context}

  DIFF TO ANALYZE:
@@ -260,6 +607,45 @@ DIFF TO ANALYZE:
  ```

  Reply with ONLY the commit message, nothing else. No quotes, no explanation.`;
+ var VALID_TYPES = ["feat", "fix", "docs", "style", "refactor", "test", "chore", "build", "ci", "perf", "revert"];
+ function isValidType(type) {
+ return VALID_TYPES.includes(type.toLowerCase());
+ }
+ function getValidTypes() {
+ return [...VALID_TYPES];
+ }
+ function buildConstraintsText(constraints) {
+ const parts = [];
+ if (constraints.type) {
+ parts.push(`CONSTRAINT: You MUST use "${constraints.type}" as the commit type.`);
+ }
+ if (constraints.scope) {
+ parts.push(`CONSTRAINT: You MUST use "(${constraints.scope})" as the scope in the commit message.`);
+ }
+ if (constraints.breaking) {
+ parts.push(`CONSTRAINT: This is a BREAKING CHANGE. You MUST use "!" after the type/scope (e.g., "feat!:" or "feat(api)!:").`);
+ }
+ if (constraints.language) {
+ const langMap = {
+ en: "English",
+ pt: "Portuguese",
+ es: "Spanish",
+ fr: "French",
+ de: "German",
+ it: "Italian",
+ ja: "Japanese",
+ zh: "Chinese",
+ ko: "Korean",
+ ru: "Russian"
+ };
+ const langName = langMap[constraints.language.toLowerCase()] || constraints.language;
+ parts.push(`CONSTRAINT: Write the commit message subject in ${langName}.`);
+ }
+ if (constraints.context) {
+ parts.push(`ADDITIONAL CONTEXT: ${constraints.context}`);
+ }
+ return parts.join("\n");
+ }
  var KARMA_PATTERN = /^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\([^)]+\))?:\s*.+/;
  var ACTION_TO_TYPE = {
  add: "feat",
@@ -297,9 +683,10 @@ function truncateDiff(diff, maxChars = MAX_DIFF_CHARS) {
  }
  return truncated + "\n\n[... diff truncated for brevity ...]";
  }
- function buildPrompt(diff, context) {
+ function buildPrompt(diff, context, constraints) {
  const truncatedDiff = truncateDiff(diff);
- return KARMA_PROMPT.replace("{diff}", truncatedDiff).replace("{context}", context);
+ const constraintsText = constraints ? buildConstraintsText(constraints) : "";
+ return KARMA_PROMPT.replace("{diff}", truncatedDiff).replace("{context}", context).replace("{constraints}", constraintsText);
  }
  function buildSummarizePrompt(diff, context) {
  const truncatedDiff = truncateDiff(diff);
@@ -335,9 +722,30 @@ function fixMessage(message) {
  }
  return `chore: ${cleaned.toLowerCase()}`;
  }
+ function addIssueReference(message, issue) {
+ const issueRef = issue.startsWith("#") ? issue : `#${issue}`;
+ return `${message}
+
+ Refs: ${issueRef}`;
+ }
+ function addCoAuthors(message, coAuthors) {
+ if (coAuthors.length === 0) {
+ return message;
+ }
+ const trailers = coAuthors.map((author) => `Co-authored-by: ${author}`).join("\n");
+ return `${message}
+
+ ${trailers}`;
+ }
+ function ensureBreakingMarker(message) {
+ if (message.includes("!:")) {
+ return message;
+ }
+ return message.replace(/:/, "!:");
+ }

  // src/hook.ts
- import { execSync as execSync2 } from "child_process";
+ import { execSync } from "child_process";
  import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, unlinkSync, existsSync as existsSync2, chmodSync, mkdirSync as mkdirSync2 } from "fs";
  import { join as join2, dirname as dirname2 } from "path";
  var HOOK_SCRIPT = `#!/bin/sh
@@ -380,7 +788,7 @@ exit 0
  var HOOK_NAME = "prepare-commit-msg";
  function getGitDir() {
  try {
- const result = execSync2("git rev-parse --git-dir", {
+ const result = execSync("git rev-parse --git-dir", {
  encoding: "utf-8",
  stdio: ["pipe", "pipe", "pipe"]
  });
@@ -497,6 +905,32 @@ function debugValidation(message, isValid, fixed) {
  }

  // src/cli.ts
+ function buildDiffContext(diffResult) {
+ const parts = [];
+ if (diffResult.filesAdded.length > 0) {
+ parts.push(`Files added:
+ ${diffResult.filesAdded.slice(0, 5).join("\n")}`);
+ if (diffResult.filesAdded.length > 5) {
+ parts.push(` ... and ${diffResult.filesAdded.length - 5} more added`);
+ }
+ }
+ if (diffResult.filesDeleted.length > 0) {
+ parts.push(`Files deleted:
+ ${diffResult.filesDeleted.slice(0, 5).join("\n")}`);
+ if (diffResult.filesDeleted.length > 5) {
+ parts.push(` ... and ${diffResult.filesDeleted.length - 5} more deleted`);
+ }
+ }
+ if (diffResult.filesModified.length > 0) {
+ parts.push(`Files modified:
+ ${diffResult.filesModified.slice(0, 5).join("\n")}`);
+ if (diffResult.filesModified.length > 5) {
+ parts.push(` ... and ${diffResult.filesModified.length - 5} more modified`);
+ }
+ }
+ parts.push(`Stats: ${diffResult.stats}`);
+ return parts.join("\n");
+ }
  async function promptUser(question, choices) {
  const rl = createInterface({
  input: process.stdin,
@@ -527,15 +961,18 @@ async function promptEdit(currentMessage) {
  });
  });
  }
- async function generateMessage(backend, diffContent, context, temperatures) {
- const prompt = buildPrompt(diffContent, context);
+ async function generateMessage(backend, diffContent, context, temperatures, constraints) {
+ const prompt = buildPrompt(diffContent, context, constraints);
  debugPrompt(prompt);
  for (const temp of temperatures) {
  debug(`Trying temperature: ${temp}`);
  try {
  const rawMessage = await backend.generate(prompt, temp);
  debugResponse(rawMessage);
- const message = cleanMessage(rawMessage);
+ let message = cleanMessage(rawMessage);
+ if (constraints?.breaking) {
+ message = ensureBreakingMarker(message);
+ }
  const isValid = validateMessage(message);
  debugValidation(message, isValid);
  if (isValid) {
@@ -544,7 +981,7 @@ async function generateMessage(backend, diffContent, context, temperatures) {
  const fixed = fixMessage(message);
  if (validateMessage(fixed)) {
  debugValidation(fixed, true, fixed);
- return fixed;
+ return constraints?.breaking ? ensureBreakingMarker(fixed) : fixed;
  }
  } catch (e) {
  const error = e;
@@ -570,13 +1007,13 @@ async function promptAction(message) {
  ["c", "e", "r", "a"]
  );
  }
- async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm) {
+ async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm, constraints) {
  const temperatures = [cfg.temperature, ...cfg.retry_temperatures];
  const spinner = ora("Generating commit message...").start();
  while (true) {
  let message;
  try {
- message = await generateMessage(backend, diffContent, context, temperatures);
+ message = await generateMessage(backend, diffContent, context, temperatures, constraints);
  } finally {
  spinner.stop();
  }
@@ -602,25 +1039,58 @@ async function runCommitFlow(backend, cfg, diffContent, context, skipConfirm) {
  }
  }
  }
- async function handleSingleCommit(backend, cfg, skipConfirm) {
- const diffResult = getStagedDiff();
- if (diffResult.isEmpty) {
- console.log(chalk2.yellow("No changes to commit."));
- process.exit(0);
+ async function handleSingleCommit(backend, cfg, options) {
+ let diffResult;
+ if (options.amend) {
+ diffResult = getLastCommitDiff();
+ if (diffResult.isEmpty) {
+ console.log(chalk2.yellow("No previous commit to amend."));
+ process.exit(1);
+ }
+ console.log(chalk2.dim("Amending last commit..."));
+ } else {
+ diffResult = getStagedDiff();
+ if (diffResult.isEmpty) {
+ console.log(chalk2.yellow("No changes to commit."));
+ process.exit(0);
+ }
  }
- debugDiff(diffResult.diff, diffResult.files);
- const context = `Files changed:
- ${diffResult.files.slice(0, 5).join("\n")}
- Stats: ${diffResult.stats}`;
- const message = await runCommitFlow(backend, cfg, diffResult.diff, context, skipConfirm);
+ let diff = diffResult.diff;
+ if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+ diff = filterDiffByPatterns(diff, cfg.ignore_patterns);
+ if (!diff.trim()) {
+ console.log(chalk2.yellow("All changes are ignored by ignore_patterns."));
+ process.exit(0);
+ }
+ }
+ debugDiff(diff, diffResult.files);
+ const context = buildDiffContext(diffResult);
+ let message = await runCommitFlow(backend, cfg, diff, context, options.skipConfirm, options.constraints);
  if (message === null) {
  console.log(chalk2.yellow("Aborted."));
  process.exit(0);
  }
+ if (options.issue) {
+ message = addIssueReference(message, options.issue);
+ }
+ if (options.coAuthors && options.coAuthors.length > 0) {
+ message = addCoAuthors(message, options.coAuthors);
+ }
+ if (options.dryRun) {
+ console.log(chalk2.cyan("Dry run - message not committed:"));
+ console.log(message);
+ return;
+ }
  try {
- commit(message);
- debug(`Commit successful: ${message}`);
- console.log(chalk2.green("\u2713 Committed:"), message);
+ if (options.amend) {
+ commitAmend(message);
+ debug(`Amend successful: ${message}`);
+ console.log(chalk2.green("\u2713 Amended:"), message.split("\n")[0]);
+ } else {
+ commit(message);
+ debug(`Commit successful: ${message}`);
+ console.log(chalk2.green("\u2713 Committed:"), message.split("\n")[0]);
+ }
  } catch (e) {
  const error = e;
  debug(`Commit failed: ${error.message}`);
@@ -628,7 +1098,7 @@ Stats: ${diffResult.stats}`;
  process.exit(1);
  }
  }
- async function handleIndividualCommits(backend, cfg, skipConfirm) {
+ async function handleIndividualCommits(backend, cfg, options) {
  const stagedFiles = getStagedFiles();
  if (stagedFiles.length === 0) {
  console.log(chalk2.yellow("No staged files to commit."));
@@ -638,6 +1108,13 @@ async function handleIndividualCommits(backend, cfg, skipConfirm) {
  console.log(chalk2.dim(`Found ${stagedFiles.length} files to commit individually.`));
  resetStaged();
  for (const filePath of stagedFiles) {
+ if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+ const { shouldIgnoreFile } = await import("./git-F4ZHBA3B.js");
+ if (shouldIgnoreFile(filePath, cfg.ignore_patterns)) {
+ console.log(chalk2.dim(`Skipping ignored file: ${filePath}`));
+ continue;
+ }
+ }
  const added = addFiles(filePath);
  if (!added) {
  continue;
@@ -648,16 +1125,26 @@ async function handleIndividualCommits(backend, cfg, skipConfirm) {
  }
  console.log(chalk2.bold(`
  Processing: ${filePath}`));
- const context = `File: ${filePath}
- Stats: ${diffResult.stats}`;
- const message = await runCommitFlow(backend, cfg, diffResult.diff, context, skipConfirm);
+ const context = buildDiffContext(diffResult);
+ let message = await runCommitFlow(backend, cfg, diffResult.diff, context, options.skipConfirm, options.constraints);
  if (message === null) {
  console.log(chalk2.yellow(`Skipped: ${filePath}`));
  continue;
  }
+ if (options.issue) {
+ message = addIssueReference(message, options.issue);
+ }
+ if (options.coAuthors && options.coAuthors.length > 0) {
+ message = addCoAuthors(message, options.coAuthors);
+ }
+ if (options.dryRun) {
+ console.log(chalk2.cyan(`Dry run - ${filePath}:`));
+ console.log(message);
+ continue;
+ }
  try {
  commit(message);
- console.log(chalk2.green("\u2713 Committed:"), message);
+ console.log(chalk2.green("\u2713 Committed:"), message.split("\n")[0]);
  } catch (e) {
  const error = e;
  console.log(chalk2.red(`Error committing ${filePath}: ${error.message}`));
@@ -666,46 +1153,124 @@ Stats: ${diffResult.stats}`;
  }
  }
  function createProgram() {
  const program2 = new Command();
- program2.name("git-commit-ai").description("Generate commit messages using local LLMs").version("0.2.0").option("-p, --push", "Push after commit").option("-y, --yes", "Skip confirmation").option("-i, --individual", "Commit files individually").option("-d, --debug", "Enable debug output").option("--hook-mode", "Called by git hook (outputs message only)").action(async (options) => {
+ program2.name("git-commit-ai").description("Generate commit messages using LLMs (Ollama, OpenAI, Anthropic, Groq, llama.cpp)").version("0.3.0").option("-p, --push", "Push after commit").option("-y, --yes", "Skip confirmation").option("-i, --individual", "Commit files individually").option("-d, --debug", "Enable debug output").option("--dry-run", "Show generated message without committing").option("-b, --backend <backend>", "Backend to use (ollama, openai, anthropic, groq, llamacpp)").option("-m, --model <model>", "Override model from config").option("-t, --temperature <temp>", "Override temperature (0.0-1.0)", parseFloat).option("--hook-mode", "Called by git hook (outputs message only)").option("--amend", "Regenerate and amend the last commit message").option("-s, --scope <scope>", "Force a specific scope (e.g., auth, api)").option("--type <type>", `Force commit type (${getValidTypes().join(", ")})`).option("-c, --context <text>", "Provide additional context for message generation").option("-l, --lang <code>", "Language for commit message (en, pt, es, fr, de, etc.)").option("--issue <ref>", "Reference an issue (e.g., 123 or #123)").option("--breaking", "Mark as breaking change (adds ! to type)").option("--co-author <author>", "Add co-author (can be used multiple times)", (val, prev) => prev.concat([val]), []).action(async (options) => {
  if (options.debug) {
  enableDebug();
  debug("Debug mode enabled");
  }
  const cfg = loadConfig();
+ if (options.type && !isValidType(options.type)) {
+ console.log(chalk2.red(`Error: Invalid commit type "${options.type}"`));
+ console.log(chalk2.dim(`Valid types: ${getValidTypes().join(", ")}`));
+ process.exit(1);
+ }
+ if (options.backend) {
+ const validBackends = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+ if (validBackends.includes(options.backend)) {
+ cfg.backend = options.backend;
+ if (!options.model && cfg.model === "llama3.1:8b") {
+ cfg.model = DEFAULT_MODELS[cfg.backend];
+ }
+ debug(`Backend overridden to: ${cfg.backend}`);
+ } else {
+ console.log(chalk2.red(`Error: Invalid backend "${options.backend}"`));
+ console.log(chalk2.dim(`Valid backends: ${validBackends.join(", ")}`));
+ process.exit(1);
+ }
+ }
+ if (options.model) {
+ cfg.model = options.model;
+ debug(`Model overridden to: ${cfg.model}`);
+ }
+ if (options.temperature !== void 0 && !isNaN(options.temperature)) {
+ cfg.temperature = options.temperature;
+ debug(`Temperature overridden to: ${cfg.temperature}`);
+ }
  debugConfig(cfg);
- const backend = new OllamaBackend(cfg.model, cfg.ollama_url);
+ if (!options.backend && cfg.backend === "ollama") {
+ const detected = await detectBackend();
+ if (detected !== "ollama") {
+ cfg.backend = detected;
+ cfg.model = DEFAULT_MODELS[detected];
+ debug(`Auto-detected backend: ${detected}`);
+ }
+ }
+ const backend = createBackend(cfg);
+ debug(`Using backend: ${cfg.backend} with model: ${cfg.model}`);
  const available = await backend.isAvailable();
  if (!available) {
  if (options.hookMode) {
  process.exit(1);
  }
- console.log(chalk2.red("Error: Ollama is not running."));
- console.log(chalk2.dim("Start it with: brew services start ollama"));
+ if (cfg.backend === "ollama") {
+ console.log(chalk2.red("Error: Ollama is not running."));
+ console.log(chalk2.dim("Start it with: brew services start ollama"));
+ } else if (cfg.backend === "llamacpp") {
+ console.log(chalk2.red("Error: llama.cpp server is not running."));
+ console.log(chalk2.dim("Start it with: llama-server -m model.gguf --port 8080"));
+ } else {
+ console.log(chalk2.red(`Error: ${cfg.backend} backend is not available.`));
+ const envVar = {
+ openai: "OPENAI_API_KEY",
+ anthropic: "ANTHROPIC_API_KEY",
+ groq: "GROQ_API_KEY"
+ }[cfg.backend];
+ if (envVar) {
+ console.log(chalk2.dim(`Set ${envVar} environment variable.`));
+ }
+ }
+ const availableBackends = getAvailableBackends();
+ if (availableBackends.length > 1) {
+ console.log(chalk2.dim(`Available backends: ${availableBackends.join(", ")}`));
+ }
  process.exit(1);
  }
+ const constraints = {
+ type: options.type || cfg.default_type,
+ scope: options.scope || cfg.default_scope,
+ language: options.lang || cfg.default_language,
+ breaking: options.breaking,
+ context: options.context
+ };
  if (options.hookMode) {
  const diffResult = getStagedDiff();
  if (diffResult.isEmpty) {
  process.exit(1);
  }
- const context = `Files changed:
- ${diffResult.files.slice(0, 5).join("\n")}
- Stats: ${diffResult.stats}`;
+ let diff = diffResult.diff;
+ if (cfg.ignore_patterns && cfg.ignore_patterns.length > 0) {
+ diff = filterDiffByPatterns(diff, cfg.ignore_patterns);
+ }
+ const context = buildDiffContext(diffResult);
  const temperatures = [cfg.temperature, ...cfg.retry_temperatures];
- const message = await generateMessage(backend, diffResult.diff, context, temperatures);
+ const message = await generateMessage(backend, diff, context, temperatures, constraints);
  if (message) {
  console.log(message);
  process.exit(0);
  }
  process.exit(1);
  }
- addFiles(".");
+ const commitOptions = {
+ skipConfirm: options.yes,
+ dryRun: options.dryRun,
+ amend: options.amend,
+ constraints,
+ issue: options.issue,
+ coAuthors: options.coAuthor
+ };
+ if (!options.amend) {
+ addFiles(".");
+ }
  if (options.individual) {
- await handleIndividualCommits(backend, cfg, options.yes);
+ if (options.amend) {
+ console.log(chalk2.red("Error: --amend cannot be used with --individual"));
+ process.exit(1);
+ }
+ await handleIndividualCommits(backend, cfg, commitOptions);
  } else {
- await handleSingleCommit(backend, cfg, options.yes);
+ await handleSingleCommit(backend, cfg, commitOptions);
  }
- if (options.push) {
+ if (options.push && !options.dryRun && !options.amend) {
  try {
  push();
  console.log(chalk2.green("\u2713 Changes pushed to remote."));
@@ -716,7 +1281,41 @@ Stats: ${diffResult.stats}`;
  }
  }
  });
- program2.command("config").description("Show or edit configuration").option("-e, --edit", "Create/edit configuration file").action((options) => {
+ program2.command("config").description("Show or edit configuration").option("-e, --edit", "Create/edit configuration file").option("-s, --set <key=value>", "Set a config value (e.g., --set backend=llamacpp)").option("-l, --list-keys", "List all valid config keys").action((options) => {
+ if (options.listKeys) {
+ console.log(chalk2.bold("Valid config keys:"));
+ for (const key of VALID_CONFIG_KEYS) {
+ const alias = Object.entries(CONFIG_ALIASES).find(([, v]) => v === key)?.[0];
+ if (alias) {
+ console.log(` ${key} ${chalk2.dim(`(alias: ${alias})`)}`);
+ } else {
+ console.log(` ${key}`);
+ }
+ }
+ console.log();
+ console.log(chalk2.bold("Short aliases:"));
+ for (const [alias, fullKey] of Object.entries(CONFIG_ALIASES)) {
+ console.log(` ${alias} \u2192 ${fullKey}`);
+ }
+ return;
+ }
+ if (options.set) {
+ const match = options.set.match(/^([^=]+)=(.*)$/);
+ if (!match) {
+ console.log(chalk2.red("Error: Invalid format. Use: --set key=value"));
+ console.log(chalk2.dim("Example: git-commit-ai config --set backend=llamacpp"));
+ process.exit(1);
+ }
+ const [, key, value] = match;
+ const result = updateConfig(key, value);
+ if (result.success) {
+ console.log(chalk2.green(`\u2713 ${result.message}`));
+ } else {
+ console.log(chalk2.red(`Error: ${result.message}`));
+ process.exit(1);
+ }
+ return;
+ }
  const cfg = loadConfig();
  if (options.edit) {
  console.log(chalk2.dim("Creating default config file..."));
@@ -727,16 +1326,34 @@ Stats: ${diffResult.stats}`;
  console.log(showConfig(cfg));
  }
  });
- program2.command("summarize").description("Summarize staged changes in plain English").option("--diff", "Also show the raw diff").option("-d, --debug", "Enable debug output").action(async (options) => {
+ program2.command("summarize").description("Summarize staged changes in plain English").option("--diff", "Also show the raw diff").option("-b, --backend <backend>", "Backend to use (ollama, openai, anthropic, groq, llamacpp)").option("-d, --debug", "Enable debug output").action(async (options) => {
  if (options.debug) {
  enableDebug();
  }
  const cfg = loadConfig();
- const backend = new OllamaBackend(cfg.model, cfg.ollama_url);
+ if (options.backend) {
+ const validBackends = ["ollama", "openai", "anthropic", "groq", "llamacpp"];
+ if (validBackends.includes(options.backend)) {
+ cfg.backend = options.backend;
+ cfg.model = DEFAULT_MODELS[cfg.backend];
+ }
+ }
+ if (cfg.backend === "ollama") {
+ const detected = await detectBackend();
+ if (detected !== "ollama") {
+ cfg.backend = detected;
+ cfg.model = DEFAULT_MODELS[detected];
+ }
+ }
+ const backend = createBackend(cfg);
  const available = await backend.isAvailable();
  if (!available) {
- console.log(chalk2.red("Error: Ollama is not running."));
- console.log(chalk2.dim("Start it with: brew services start ollama"));
+ if (cfg.backend === "ollama") {
+ console.log(chalk2.red("Error: Ollama is not running."));
+ console.log(chalk2.dim("Start it with: brew services start ollama"));
+ } else {
+ console.log(chalk2.red(`Error: ${cfg.backend} backend is not available.`));
+ }
  process.exit(1);
  }
  const diffResult = getStagedDiff();
@@ -754,8 +1371,7 @@ Files to summarize: ${diffResult.files.length}`));
  if (diffResult.files.length > 10) {
  console.log(` ... and ${diffResult.files.length - 10} more`);
  }
- const context = `Files changed: ${diffResult.files.slice(0, 5).join(", ")}
- Stats: ${diffResult.stats}`;
+ const context = buildDiffContext(diffResult);
  const prompt = buildSummarizePrompt(diffResult.diff, context);
  debugPrompt(prompt);
  const spinner = ora("Generating summary...").start();