commit-msg-ai 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3 @@
1
+ """commit-msg-ai - Generate commit messages from staged changes using a local LLM."""
2
+
3
+ __version__ = "0.2.1"
commit_msg_ai/main.py ADDED
@@ -0,0 +1,202 @@
1
+ import argparse
2
+ import json
3
+ import subprocess
4
+ import sys
5
+ from pathlib import Path
6
+
7
+ import httpx
8
+
9
# Instruction prompt sent as the "system" message to the LLM. It constrains the
# model to a single-line commit message using only the three allowed prefixes.
SYSTEM_PROMPT = """\
You are a commit message generator. Given a git diff and file list, write a clear and concise commit message.

Allowed prefixes (use ONLY these, never add a scope in parentheses):
- feat: for new features or functionality
- fix: for bug fixes
- bc: for breaking changes

Rules:
- Write a SINGLE line: prefix and short summary, max 72 characters. Example: "feat: add user authentication"
- NEVER use scopes like feat(app): or fix(core): — just "feat:", "fix:", or "bc:" directly.
- NEVER write multi-line messages. Output exactly ONE line, no body, no bullets.
- Always consider the FULL file list to understand the scope, especially if the diff is truncated.
- The summary must capture the overall intent, not list individual changes.
- Write in English.
- Focus on WHY the change was made, not just WHAT changed.
- Do NOT wrap the message in quotes or markdown.
- Do NOT include any text other than the commit message itself.\
"""

# Fallbacks used when neither CLI flags nor the config file provide a value.
DEFAULT_MODEL = "llama3.2"
DEFAULT_URL = "http://localhost:11434"  # Ollama's default local endpoint
# User-level config file; a JSON object with optional "model" and "url" keys.
CONFIG_PATH = Path.home() / ".config" / "commit-msg-ai" / "config.json"
32
+
33
+
34
def load_config() -> dict:
    """Read the user config file, returning {} when missing or unreadable."""
    if not CONFIG_PATH.exists():
        return {}
    try:
        raw = CONFIG_PATH.read_text()
        return json.loads(raw)
    except (OSError, json.JSONDecodeError):
        # A corrupt or unreadable config is treated the same as no config.
        return {}
41
+
42
+
43
def save_config(config: dict) -> None:
    """Persist *config* as pretty-printed JSON, creating parent dirs as needed."""
    target = CONFIG_PATH
    target.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(config, indent=2)
    target.write_text(serialized + "\n")
46
+
47
+
48
def get_staged_diff() -> str:
    """Return the staged diff text, exiting with git's stderr on failure."""
    proc = subprocess.run(
        ["git", "diff", "--staged"],
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        return proc.stdout
    print(f"Error running git diff: {proc.stderr}", file=sys.stderr)
    sys.exit(1)
58
+
59
+
60
def get_staged_files() -> str:
    """Return the staged file list from `git diff --staged --name-status`.

    Exits with git's error output when the command fails, matching the
    behavior of get_staged_diff().
    """
    result = subprocess.run(
        ["git", "diff", "--staged", "--name-status"],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        # Previously a git failure was silently ignored and an empty file
        # list was handed to the model; fail loudly like get_staged_diff().
        print(f"Error running git diff: {result.stderr}", file=sys.stderr)
        sys.exit(1)
    return result.stdout.strip()
67
+
68
+
69
def generate_message(diff: str, files: str, model: str, url: str) -> str:
    """Ask the Ollama chat API for a one-line commit message.

    Large diffs are truncated so the prompt fits a small model's context
    window; the full file list is always included so the model still sees
    the overall scope. Exits on connection, timeout, or HTTP errors.
    """
    max_diff_chars = 12000  # rough cap to keep the prompt a manageable size
    truncated = len(diff) > max_diff_chars

    parts = [f"Files changed:\n{files}\n"]
    if truncated:
        parts.append(f"Diff (truncated, showing first {max_diff_chars} chars):\n{diff[:max_diff_chars]}")
        parts.append("\n\nThe diff was truncated. Use the file list above to understand the full scope of changes.")
    else:
        parts.append(f"Diff:\n{diff}")

    user_prompt = "Generate a commit message for these changes:\n\n" + "\n".join(parts)

    try:
        response = httpx.post(
            f"{url}/api/chat",
            json={
                "model": model,
                "messages": [
                    {"role": "system", "content": SYSTEM_PROMPT},
                    {"role": "user", "content": user_prompt},
                ],
                "stream": False,  # single JSON payload instead of chunked tokens
            },
            timeout=60.0,
        )
        response.raise_for_status()
    except httpx.ConnectError:
        print(
            "Error: Cannot connect to Ollama. Is it running?\n"
            f"Expected at: {url}",
            file=sys.stderr,
        )
        sys.exit(1)
    except httpx.TimeoutException:
        # Previously an uncaught timeout (slow model / cold load) dumped a raw
        # traceback; report it like the other transport failures instead.
        print(
            f"Error: Request to Ollama timed out after 60s (model: {model}).",
            file=sys.stderr,
        )
        sys.exit(1)
    except httpx.HTTPStatusError as e:
        print(f"Error from Ollama: {e.response.text}", file=sys.stderr)
        sys.exit(1)

    return response.json()["message"]["content"].strip()
108
+
109
+
110
def confirm(prompt: str) -> bool:
    """Ask for confirmation; empty input, "y", or "yes" count as acceptance."""
    reply = input(f"{prompt} [Y/n] ")
    normalized = reply.strip().lower()
    return normalized in ("", "y", "yes")
113
+
114
+
115
def do_commit(message: str) -> None:
    """Run `git commit -m <message>`, exiting with git's error on failure."""
    proc = subprocess.run(
        ["git", "commit", "-m", message],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        print(f"Commit failed: {proc.stderr}", file=sys.stderr)
        sys.exit(1)
    print(proc.stdout)
125
+
126
+
127
def cmd_config(args):
    """Show all config, read one key, or set key=value, depending on args."""
    config = load_config()

    if args.key is None:
        # No key given: dump every setting (or a usage hint when empty).
        if config:
            for name, setting in config.items():
                print(f"{name} = {setting}")
        else:
            print("No config set. Use: commit-msg-ai config model <value>")
        return

    if args.value is None:
        # Key without value: read a single setting.
        current = config.get(args.key)
        if current is None:
            print(f"{args.key} is not set")
        else:
            print(f"{args.key} = {current}")
        return

    # Key and value both present: write the setting.
    config[args.key] = args.value
    save_config(config)
    print(f"{args.key} = {args.value}")
149
+
150
+
151
def cmd_run(args):
    """Generate a commit message for staged changes and optionally commit."""
    config = load_config()
    # CLI flags win over the config file, which wins over the defaults.
    model = args.model or config.get("model", DEFAULT_MODEL)
    url = args.url or config.get("url", DEFAULT_URL)

    diff = get_staged_diff()
    if not diff.strip():
        print("No staged changes found. Stage your changes with `git add` first.")
        sys.exit(1)

    files = get_staged_files()
    print(f"Staged files:\n{files}\n")
    print(f"Generating commit message with {model}...\n")

    message = generate_message(diff, files, model, url)

    separator = "─" * 50
    print(separator)
    print(message)
    print(separator)
    print()

    if not confirm("Commit with this message?"):
        print("Aborted.")
        return
    do_commit(message)
176
+
177
+
178
def main():
    """CLI entry point: dispatch to `config` or the default run flow."""
    parser = argparse.ArgumentParser(
        description="Generate a commit message from staged changes using a local LLM.",
    )
    subcommands = parser.add_subparsers(dest="command")

    # `config` subcommand: get/set persisted settings.
    config_parser = subcommands.add_parser("config", help="Get or set config values (model, url)")
    config_parser.add_argument("key", nargs="?", help="Config key (model, url)")
    config_parser.add_argument("value", nargs="?", help="Value to set")

    # Run-mode flags live on the top-level parser so plain `commit-msg-ai`
    # (no subcommand) accepts them.
    parser.add_argument("--model", default=None, help="Ollama model to use")
    parser.add_argument("--url", default=None, help="Ollama server URL")

    args = parser.parse_args()

    handler = cmd_config if args.command == "config" else cmd_run
    handler(args)


if __name__ == "__main__":
    main()
@@ -0,0 +1,171 @@
1
+ Metadata-Version: 2.4
2
+ Name: commit-msg-ai
3
+ Version: 0.2.1
4
+ Summary: Generate commit messages from staged changes using a local LLM via Ollama
5
+ Project-URL: Homepage, https://xavimf87.github.io/commit-msg-ai
6
+ Project-URL: Repository, https://github.com/xavimf87/commit-msg-ai
7
+ Project-URL: Issues, https://github.com/xavimf87/commit-msg-ai/issues
8
+ Author: Xavi Martínez
9
+ License-Expression: MIT
10
+ Keywords: ai,cli,commit,git,llm,ollama
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Environment :: Console
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.9
17
+ Classifier: Programming Language :: Python :: 3.10
18
+ Classifier: Programming Language :: Python :: 3.11
19
+ Classifier: Programming Language :: Python :: 3.12
20
+ Classifier: Programming Language :: Python :: 3.13
21
+ Classifier: Topic :: Software Development :: Version Control :: Git
22
+ Requires-Python: >=3.9
23
+ Requires-Dist: httpx>=0.27
24
+ Description-Content-Type: text/markdown
25
+
26
+ # commit-msg-ai
27
+
28
+ Generate commit messages from your staged changes using a local LLM via [Ollama](https://ollama.com). No API keys, no cloud — everything runs on your machine.
29
+
30
+ ## Getting started
31
+
32
+ ### 1. Install and set up Ollama
33
+
34
+ commit-msg-ai requires [Ollama](https://ollama.com) to run language models locally. Install it first:
35
+
36
+ **macOS:**
37
+
38
+ ```bash
39
+ brew install ollama
40
+ ```
41
+
42
+ **Linux:**
43
+
44
+ ```bash
45
+ curl -fsSL https://ollama.com/install.sh | sh
46
+ ```
47
+
48
+ **Windows:** Download the installer from [ollama.com/download](https://ollama.com/download).
49
+
50
+ Once installed, start the Ollama server:
51
+
52
+ ```bash
53
+ ollama serve
54
+ ```
55
+
56
+ > On macOS, Ollama runs automatically in the background after installation. You can skip this step if you see the Ollama icon in your menu bar.
57
+
58
+ ### 2. Choose a model
59
+
60
+ You need at least one model downloaded. See what's available on your machine:
61
+
62
+ ```bash
63
+ ollama list
64
+ ```
65
+
66
+ If the list is empty, pull a model. Some good options for commit message generation:
67
+
68
+ ```bash
69
+ # Lightweight and fast (~2GB)
70
+ ollama pull llama3.2
71
+
72
+ # Good for code understanding (~4.7GB)
73
+ ollama pull qwen2.5-coder
74
+
75
+ # Small and capable (~2.3GB)
76
+ ollama pull mistral
77
+ ```
78
+
79
+ You can browse all available models at [ollama.com/library](https://ollama.com/library).
80
+
81
+ ### 3. Install commit-msg-ai
82
+
83
+ **With pipx (recommended):**
84
+
85
+ ```bash
86
+ pipx install git+https://github.com/xavimf87/commit-msg-ai.git
87
+ ```
88
+
89
+ **With pip:**
90
+
91
+ ```bash
92
+ pip install git+https://github.com/xavimf87/commit-msg-ai.git
93
+ ```
94
+
95
+ ### 4. Configure your model
96
+
97
+ By default commit-msg-ai uses `llama3.2`. If you pulled a different model, set it as default:
98
+
99
+ ```bash
100
+ commit-msg-ai config model qwen2.5-coder
101
+ ```
102
+
103
+ Verify your config:
104
+
105
+ ```bash
106
+ commit-msg-ai config
107
+ ```
108
+
109
+ ### 5. Use it
110
+
111
+ ```bash
112
+ git add .
113
+ commit-msg-ai
114
+ ```
115
+
116
+ ```
117
+ Staged files:
118
+ M src/auth.py
119
+ A src/middleware.py
120
+
121
+ Generating commit message with qwen2.5-coder...
122
+
123
+ ──────────────────────────────────────────────────
124
+ feat: add JWT authentication middleware
125
+ ──────────────────────────────────────────────────
126
+
127
+ Commit with this message? [Y/n] y
128
+ [main 3a1b2c3] feat: add JWT authentication middleware
129
+ 2 files changed, 45 insertions(+), 3 deletions(-)
130
+ ```
131
+
132
+ That's it.
133
+
134
+ ## Configuration
135
+
136
+ commit-msg-ai stores config in `~/.config/commit-msg-ai/config.json`.
137
+
138
+ ```bash
139
+ # Set default model
140
+ commit-msg-ai config model mistral
141
+
142
+ # Set Ollama server URL (useful for remote setups)
143
+ commit-msg-ai config url http://192.168.1.50:11434
144
+
145
+ # View all config
146
+ commit-msg-ai config
147
+
148
+ # View a single value
149
+ commit-msg-ai config model
150
+ ```
151
+
152
+ Override any config for a single run with flags:
153
+
154
+ ```bash
155
+ commit-msg-ai --model codellama
156
+ commit-msg-ai --url http://other-server:11434
157
+ ```
158
+
159
+ ## Commit message format
160
+
161
+ commit-msg-ai generates messages with only three prefixes:
162
+
163
+ - `feat:` new features
164
+ - `fix:` bug fixes
165
+ - `bc:` breaking changes
166
+
167
+ ## Requirements
168
+
169
+ - Python 3.9+
170
+ - [Ollama](https://ollama.com) running locally (or on a reachable server)
171
+ - At least one model pulled (`ollama pull llama3.2`)
@@ -0,0 +1,6 @@
1
+ commit_msg_ai/__init__.py,sha256=2RngYNd1xDw-wUVQ_Bn-kQ14XuznUSrhvS7nveAhCnE,109
2
+ commit_msg_ai/main.py,sha256=s2ZD_AJJL8Q7Jl6A7g-74F9bdHEMv8_8BXZvKNmB4hs,5967
3
+ commit_msg_ai-0.2.1.dist-info/METADATA,sha256=VG5VdLICyjkjyWppPeCkcHwFpikEoNDQFMAbQRHBMXc,4223
4
+ commit_msg_ai-0.2.1.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
5
+ commit_msg_ai-0.2.1.dist-info/entry_points.txt,sha256=FUYPHqtB2XCNcmavUeuDBnsPXr_eh-A1tl8rsHap2MA,58
6
+ commit_msg_ai-0.2.1.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ commit-msg-ai = commit_msg_ai.main:main