ollama-git-commit 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ai_commit.py +342 -0
- ollama_git_commit-0.1.0.dist-info/METADATA +394 -0
- ollama_git_commit-0.1.0.dist-info/RECORD +7 -0
- ollama_git_commit-0.1.0.dist-info/WHEEL +5 -0
- ollama_git_commit-0.1.0.dist-info/entry_points.txt +2 -0
- ollama_git_commit-0.1.0.dist-info/licenses/LICENSE +21 -0
- ollama_git_commit-0.1.0.dist-info/top_level.txt +1 -0
ai_commit.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
AI Commit - Generate commit messages using local Ollama
|
|
4
|
+
|
|
5
|
+
Author: Himanshu Kumar
|
|
6
|
+
GitHub: https://github.com/himanshu231204/ai-commit
|
|
7
|
+
Email: himanshu231204@gmail.com
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import subprocess
|
|
12
|
+
import sys
|
|
13
|
+
from typing import Optional
|
|
14
|
+
import requests
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Colors:
    """ANSI color codes for terminal output.

    Each attribute is an escape sequence; interpolate into an f-string and
    terminate the styled span with ``Colors.END`` to reset the terminal.
    """
    HEADER = '\033[95m'     # bright magenta
    BLUE = '\033[94m'       # bright blue
    CYAN = '\033[96m'       # bright cyan
    GREEN = '\033[92m'      # bright green
    YELLOW = '\033[93m'     # bright yellow
    RED = '\033[91m'        # bright red
    BOLD = '\033[1m'        # bold weight
    UNDERLINE = '\033[4m'   # underline
    END = '\033[0m'         # reset all attributes
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class OllamaClient:
    """Thin HTTP client for a local Ollama server.

    Wraps the two REST endpoints this tool needs: ``/api/tags`` (server
    liveness and installed models) and ``/api/generate`` (text completion).
    """

    def __init__(self, base_url: str = "http://localhost:11434", model: str = "llama2"):
        # Normalize the base URL so endpoint paths can be appended safely.
        self.base_url = base_url.rstrip('/')
        self.model = model

    def is_available(self) -> bool:
        """Return True if the Ollama server answers on /api/tags."""
        try:
            return requests.get(f"{self.base_url}/api/tags", timeout=5).status_code == 200
        except requests.exceptions.RequestException:
            return False

    def list_models(self) -> list:
        """Return the names of locally installed models ([] on any failure)."""
        try:
            resp = requests.get(f"{self.base_url}/api/tags", timeout=5)
            if resp.status_code != 200:
                return []
            return [entry['name'] for entry in resp.json().get('models', [])]
        except requests.exceptions.RequestException:
            return []

    def generate(self, prompt: str) -> Optional[str]:
        """Run a non-streaming completion; return stripped text or None."""
        payload = {
            "model": self.model,
            "prompt": prompt,
            "stream": False
        }
        try:
            resp = requests.post(
                f"{self.base_url}/api/generate",
                json=payload,
                timeout=30
            )
        except requests.exceptions.RequestException as e:
            print(f"{Colors.RED}Error connecting to Ollama: {e}{Colors.END}")
            return None

        if resp.status_code == 200:
            return resp.json().get('response', '').strip()
        return None
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class GitService:
    """Thin wrappers around the ``git`` CLI via subprocess.

    All methods are static and operate on the current working directory.
    Failures — including the ``git`` executable being absent from PATH —
    are reported through return values, never exceptions.
    """

    @staticmethod
    def is_git_repo() -> bool:
        """Return True if the current directory is inside a git repository.

        Fix: also catches OSError (e.g. FileNotFoundError when git is not
        installed), which previously escaped as an unhandled traceback.
        """
        try:
            subprocess.run(
                ['git', 'rev-parse', '--git-dir'],
                capture_output=True,
                check=True
            )
            return True
        except (subprocess.CalledProcessError, OSError):
            # CalledProcessError: not a repo; OSError: git binary missing.
            return False

    @staticmethod
    def get_staged_diff() -> Optional[str]:
        """Return the diff of staged changes, or None if git fails."""
        try:
            result = subprocess.run(
                ['git', 'diff', '--cached'],
                capture_output=True,
                text=True,
                check=True
            )
            return result.stdout
        except (subprocess.CalledProcessError, OSError):
            return None

    @staticmethod
    def has_staged_changes() -> bool:
        """Return True if there is at least one non-whitespace staged change."""
        diff = GitService.get_staged_diff()
        return bool(diff and diff.strip())

    @staticmethod
    def commit(message: str) -> bool:
        """Create a git commit with the given message; True on success.

        Output of the commit command goes straight to the terminal so the
        user sees git's own summary.
        """
        try:
            subprocess.run(
                ['git', 'commit', '-m', message],
                check=True
            )
            return True
        except (subprocess.CalledProcessError, OSError):
            return False
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
class CommitGenerator:
    """Generate commit messages from a git diff using the Ollama client."""

    def __init__(self, ollama_client: "OllamaClient"):
        # Annotation is quoted so this class does not need OllamaClient to be
        # resolvable at definition time when imported in isolation.
        self.ollama = ollama_client

    def generate_message(self, diff: str, style: str = "conventional") -> Optional[str]:
        """Generate a commit message from a git diff.

        Args:
            diff: Output of ``git diff --cached``; truncated to 3000 chars
                before being sent to the model to bound prompt size.
            style: "conventional", "semantic" or "detailed"; any unknown
                value falls back to "conventional".

        Returns:
            The cleaned commit message, or None if generation failed.
        """

        prompts = {
            "conventional": """You are a git commit message expert. Analyze the following git diff and generate a commit message following the Conventional Commits format.

Rules:
- Use format: <type>(<scope>): <subject>
- Types: feat, fix, docs, style, refactor, test, chore
- Subject should be lowercase, no period at end
- Keep it concise (max 50 characters for subject)
- If needed, add a body explaining what and why (not how)

Git diff:
{diff}

Generate ONLY the commit message, nothing else:""",

            "semantic": """You are a git commit message expert. Analyze the following git diff and generate a clear, semantic commit message.

Rules:
- Start with a verb (Add, Update, Fix, Remove, etc.)
- Be specific about what changed
- Keep it concise (max 72 characters)
- Use present tense

Git diff:
{diff}

Generate ONLY the commit message, nothing else:""",

            "detailed": """You are a git commit message expert. Analyze the following git diff and generate a detailed commit message.

Rules:
- First line: Brief summary (max 50 chars)
- Blank line
- Body: Explain what and why (not how)
- Use bullet points if multiple changes

Git diff:
{diff}

Generate ONLY the commit message, nothing else:"""
        }

        prompt = prompts.get(style, prompts["conventional"]).format(diff=diff[:3000])  # Limit diff size

        message = self.ollama.generate(prompt)

        if message:
            message = message.strip()
            # Strip a surrounding markdown code fence if the model added one.
            # Fix: the old `lines[1:-1] if len(lines) > 2` logic kept the
            # opening fence when the reply had exactly two lines; handle the
            # opening and closing fences independently instead.
            if message.startswith('```'):
                lines = message.split('\n')[1:]  # drop opening fence (and any language tag)
                if lines and lines[-1].strip().startswith('```'):
                    lines = lines[:-1]  # drop closing fence
                message = '\n'.join(lines).strip()

        return message
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def print_banner() -> None:
    """Print the application's colored ASCII banner to stdout.

    NOTE(review): the box-drawing/emoji characters below appear
    mojibake-encoded in this copy of the source; the literal is kept
    byte-identical — confirm against the original encoding before reflowing.
    """
    banner = f"""
{Colors.CYAN}{Colors.BOLD}
βββββββββββββββββββββββββββββββββββββββββββββ
β        π€ AI Commit Message Tool          β
β      Powered by Local Ollama π¦          β
βββββββββββββββββββββββββββββββββββββββββββββ
{Colors.END}
"""
    print(banner)
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def print_diff_summary(diff: str):
    """Print added/removed line counts for a unified diff.

    File-header lines (``+++``/``---``) are excluded from the counts.
    """
    added = 0
    removed = 0
    for entry in diff.split('\n'):
        if entry.startswith('+') and not entry.startswith('+++'):
            added += 1
        elif entry.startswith('-') and not entry.startswith('---'):
            removed += 1

    print(f"{Colors.YELLOW}π Changes:{Colors.END}")
    print(f"  {Colors.GREEN}+ {added} lines added{Colors.END}")
    print(f"  {Colors.RED}- {removed} lines removed{Colors.END}")
    print()
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def get_user_choice(message: str) -> str:
    """Prompt until the user enters one of y/n/r/e and return it.

    Input is lowercased and surrounding whitespace is stripped.

    Fix: if stdin is exhausted (EOF — e.g. piped input runs dry), the
    choice defaults to 'n' (cancel) instead of crashing with an
    unhandled EOFError traceback.
    """
    while True:
        try:
            choice = input(f"{message} {Colors.CYAN}[y/n/r/e]{Colors.END}: ").strip().lower()
        except EOFError:
            # No more input available — the safe default is to cancel.
            return 'n'
        if choice in ['y', 'n', 'r', 'e']:
            return choice
        print(f"{Colors.RED}Invalid choice. Use y (yes), n (no), r (regenerate), or e (edit){Colors.END}")
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def main() -> None:
    """Main application entry point.

    Flow: validate environment (git repo, staged changes, Ollama server),
    then loop: generate a message, show it, and act on the user's choice
    (y = commit, r = regenerate, e = manual entry, n = cancel). Exits the
    process via sys.exit at every terminal outcome.
    """
    print_banner()

    # Check if we're in a git repository
    if not GitService.is_git_repo():
        print(f"{Colors.RED}β Error: Not a git repository{Colors.END}")
        print(f"{Colors.YELLOW}Please run this command in a git repository{Colors.END}")
        sys.exit(1)

    # Check for staged changes
    if not GitService.has_staged_changes():
        print(f"{Colors.YELLOW}β οΈ No staged changes found{Colors.END}")
        print(f"{Colors.CYAN}Please stage your changes first:{Colors.END}")
        print(f"  git add <files>")
        sys.exit(1)

    # Initialize Ollama client (defaults: localhost:11434, model "llama2")
    ollama = OllamaClient()

    # Check if Ollama is running
    print(f"{Colors.CYAN}π Checking Ollama server...{Colors.END}")
    if not ollama.is_available():
        print(f"{Colors.RED}β Error: Cannot connect to Ollama{Colors.END}")
        print(f"{Colors.YELLOW}Please make sure Ollama is running:{Colors.END}")
        print(f"  ollama serve")
        sys.exit(1)

    print(f"{Colors.GREEN}β Ollama server is running{Colors.END}\n")

    # List available models (informational only — selection is not interactive)
    models = ollama.list_models()
    if models:
        print(f"{Colors.CYAN}Available models:{Colors.END} {', '.join(models)}")
        print(f"{Colors.CYAN}Using model:{Colors.END} {ollama.model}\n")

    # Get staged diff
    diff = GitService.get_staged_diff()
    if not diff:
        print(f"{Colors.RED}β Error: Could not get git diff{Colors.END}")
        sys.exit(1)

    print_diff_summary(diff)

    # Generate commit message
    generator = CommitGenerator(ollama)

    print(f"{Colors.CYAN}π€ Generating commit message...{Colors.END}\n")

    # Loop until the user accepts, edits, or cancels; 'r' regenerates.
    while True:
        commit_message = generator.generate_message(diff, style="conventional")

        if not commit_message:
            print(f"{Colors.RED}β Failed to generate commit message{Colors.END}")
            sys.exit(1)

        # Display generated message
        print(f"{Colors.GREEN}{Colors.BOLD}Generated Commit Message:{Colors.END}")
        print(f"{Colors.CYAN}{'β' * 50}{Colors.END}")
        print(f"{Colors.BOLD}{commit_message}{Colors.END}")
        print(f"{Colors.CYAN}{'β' * 50}{Colors.END}\n")

        # Ask user what to do
        print(f"{Colors.YELLOW}Options:{Colors.END}")
        print(f"  {Colors.GREEN}y{Colors.END} - Accept and commit")
        print(f"  {Colors.YELLOW}r{Colors.END} - Regenerate message")
        print(f"  {Colors.BLUE}e{Colors.END} - Edit message")
        print(f"  {Colors.RED}n{Colors.END} - Cancel")

        choice = get_user_choice("\nWhat would you like to do?")

        if choice == 'y':
            # Commit with the generated message
            print(f"\n{Colors.CYAN}π Creating commit...{Colors.END}")
            if GitService.commit(commit_message):
                print(f"{Colors.GREEN}β Commit created successfully!{Colors.END}")
                sys.exit(0)
            else:
                print(f"{Colors.RED}β Failed to create commit{Colors.END}")
                sys.exit(1)

        elif choice == 'r':
            # Regenerate
            print(f"\n{Colors.CYAN}π Regenerating...{Colors.END}\n")
            continue

        elif choice == 'e':
            # Edit message: read multi-line input until EOF (Ctrl+D)
            print(f"\n{Colors.CYAN}βοΈ Enter your commit message (press Ctrl+D when done):{Colors.END}")
            try:
                lines = []
                while True:
                    line = input()
                    lines.append(line)
            except EOFError:
                edited_message = '\n'.join(lines).strip()
                if edited_message:
                    if GitService.commit(edited_message):
                        print(f"{Colors.GREEN}β Commit created successfully!{Colors.END}")
                        sys.exit(0)
                    else:
                        print(f"{Colors.RED}β Failed to create commit{Colors.END}")
                        sys.exit(1)
                # NOTE: an empty edited message falls through and the loop
                # silently regenerates a new suggestion.

        elif choice == 'n':
            # Cancel
            print(f"\n{Colors.YELLOW}Operation cancelled{Colors.END}")
            sys.exit(0)
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Graceful exit on Ctrl+C anywhere in the interactive flow.
        print(f"\n\n{Colors.YELLOW}Operation cancelled by user{Colors.END}")
        sys.exit(0)
|
|
@@ -0,0 +1,394 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ollama-git-commit
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Generate AI-powered git commit messages using local Ollama
|
|
5
|
+
Home-page: https://github.com/himanshu231204/ai-commit
|
|
6
|
+
Author: Himanshu Kumar
|
|
7
|
+
Author-email: Himanshu Kumar <himanshu231204@gmail.com>
|
|
8
|
+
License: MIT
|
|
9
|
+
Project-URL: Homepage, https://github.com/himanshu231204/ai-commit
|
|
10
|
+
Project-URL: Documentation, https://github.com/himanshu231204/ai-commit#readme
|
|
11
|
+
Project-URL: Repository, https://github.com/himanshu231204/ai-commit
|
|
12
|
+
Project-URL: Bug Tracker, https://github.com/himanshu231204/ai-commit/issues
|
|
13
|
+
Project-URL: Funding, https://github.com/sponsors/himanshu231204
|
|
14
|
+
Keywords: git,commit,ai,ollama,llm,cli
|
|
15
|
+
Classifier: Development Status :: 3 - Alpha
|
|
16
|
+
Classifier: Intended Audience :: Developers
|
|
17
|
+
Classifier: Topic :: Software Development :: Version Control :: Git
|
|
18
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
19
|
+
Classifier: Programming Language :: Python :: 3
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
24
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
25
|
+
Classifier: Operating System :: OS Independent
|
|
26
|
+
Requires-Python: >=3.8
|
|
27
|
+
Description-Content-Type: text/markdown
|
|
28
|
+
License-File: LICENSE
|
|
29
|
+
Requires-Dist: requests>=2.28.0
|
|
30
|
+
Dynamic: author
|
|
31
|
+
Dynamic: home-page
|
|
32
|
+
Dynamic: license-file
|
|
33
|
+
Dynamic: requires-python
|
|
34
|
+
|
|
35
|
+
# π€ AI Commit - AI-Powered Git Commit Messages
|
|
36
|
+
|
|
37
|
+
Generate intelligent git commit messages using your local Ollama instance. No API keys, completely free, and runs offline!
|
|
38
|
+
|
|
39
|
+
[](https://opensource.org/licenses/MIT)
|
|
40
|
+
[](https://www.python.org/downloads/)
|
|
41
|
+
[](https://github.com/himanshu231204/ai-commit)
|
|
42
|
+
|
|
43
|
+
---
|
|
44
|
+
|
|
45
|
+
## β¨ Features
|
|
46
|
+
|
|
47
|
+
- π€ **AI-Powered**: Uses local Ollama models to generate commit messages
|
|
48
|
+
- π **Privacy First**: Everything runs locally, no data sent to external APIs
|
|
49
|
+
- π― **Multiple Styles**: Conventional commits, semantic, or detailed formats
|
|
50
|
+
- π° **Free**: No API costs, uses your local Ollama instance
|
|
51
|
+
- β‘ **Fast**: Quick generation with local models
|
|
52
|
+
- π¨ **Interactive**: Review, regenerate, or edit messages before committing
|
|
53
|
+
- π **Offline**: Works completely offline
|
|
54
|
+
|
|
55
|
+
---
|
|
56
|
+
|
|
57
|
+
## π Prerequisites
|
|
58
|
+
|
|
59
|
+
Before installing AI Commit, you need:
|
|
60
|
+
|
|
61
|
+
1. **Python 3.8+**
|
|
62
|
+
```bash
|
|
63
|
+
python --version
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
2. **Git**
|
|
67
|
+
```bash
|
|
68
|
+
git --version
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
3. **Ollama** - [Install Ollama](https://ollama.ai)
|
|
72
|
+
```bash
|
|
73
|
+
# Install Ollama (macOS/Linux)
|
|
74
|
+
curl -fsSL https://ollama.ai/install.sh | sh
|
|
75
|
+
|
|
76
|
+
# Pull a model (e.g., llama2)
|
|
77
|
+
ollama pull llama2
|
|
78
|
+
|
|
79
|
+
# Start Ollama server
|
|
80
|
+
ollama serve
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
## π Installation
|
|
86
|
+
|
|
87
|
+
### Option 1: Install from PyPI (Coming Soon)
|
|
88
|
+
```bash
|
|
89
|
+
pip install ollama-git-commit
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
### Option 2: Install from Source
|
|
93
|
+
```bash
|
|
94
|
+
# Clone the repository
|
|
95
|
+
git clone https://github.com/himanshu231204/ai-commit.git
|
|
96
|
+
cd ai-commit
|
|
97
|
+
|
|
98
|
+
# Install
|
|
99
|
+
pip install -e .
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
### Option 3: Quick Install Script
|
|
103
|
+
```bash
|
|
104
|
+
# Clone and run install script
|
|
105
|
+
git clone https://github.com/himanshu231204/ai-commit.git
|
|
106
|
+
cd ai-commit
|
|
107
|
+
bash install.sh
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
---
|
|
111
|
+
|
|
112
|
+
## π‘ Usage
|
|
113
|
+
|
|
114
|
+
### Basic Usage
|
|
115
|
+
|
|
116
|
+
1. **Stage your changes**:
|
|
117
|
+
```bash
|
|
118
|
+
git add .
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
2. **Generate commit message**:
|
|
122
|
+
```bash
|
|
123
|
+
ai-commit
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
3. **Review and choose**:
|
|
127
|
+
- `y` - Accept and commit
|
|
128
|
+
- `r` - Regenerate message
|
|
129
|
+
- `e` - Edit message manually
|
|
130
|
+
- `n` - Cancel
|
|
131
|
+
|
|
132
|
+
### Example Workflow
|
|
133
|
+
|
|
134
|
+
```bash
|
|
135
|
+
# Make some changes to your code
|
|
136
|
+
echo "print('Hello World')" > hello.py
|
|
137
|
+
|
|
138
|
+
# Stage the changes
|
|
139
|
+
git add hello.py
|
|
140
|
+
|
|
141
|
+
# Generate AI commit message
|
|
142
|
+
ai-commit
|
|
143
|
+
|
|
144
|
+
# Output:
|
|
145
|
+
# π€ AI Commit Message Tool
|
|
146
|
+
# βββββββββββββββββββββββββββ
|
|
147
|
+
# Generated Commit Message:
|
|
148
|
+
# βββββββββββββββββββββββββββ
|
|
149
|
+
# feat: add hello world script
|
|
150
|
+
# βββββββββββββββββββββββββββ
|
|
151
|
+
#
|
|
152
|
+
# Options:
|
|
153
|
+
# y - Accept and commit
|
|
154
|
+
# r - Regenerate message
|
|
155
|
+
# e - Edit message
|
|
156
|
+
# n - Cancel
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
---
|
|
160
|
+
|
|
161
|
+
## βοΈ Configuration
|
|
162
|
+
|
|
163
|
+
### Custom Ollama Server
|
|
164
|
+
|
|
165
|
+
If your Ollama server is running on a different host/port, edit `ai_commit.py`:
|
|
166
|
+
|
|
167
|
+
```python
|
|
168
|
+
ollama = OllamaClient(base_url="http://192.168.1.100:11434")
|
|
169
|
+
```
|
|
170
|
+
|
|
171
|
+
### Change AI Model
|
|
172
|
+
|
|
173
|
+
```python
|
|
174
|
+
ollama = OllamaClient(model="codellama") # or "mistral", "llama2", etc.
|
|
175
|
+
```
|
|
176
|
+
|
|
177
|
+
### Commit Message Styles
|
|
178
|
+
|
|
179
|
+
The tool supports three commit message styles:
|
|
180
|
+
|
|
181
|
+
1. **Conventional Commits** (default):
|
|
182
|
+
```
|
|
183
|
+
feat(auth): add user login functionality
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
2. **Semantic**:
|
|
187
|
+
```
|
|
188
|
+
Add user login functionality
|
|
189
|
+
```
|
|
190
|
+
|
|
191
|
+
3. **Detailed**:
|
|
192
|
+
```
|
|
193
|
+
Add user authentication system
|
|
194
|
+
|
|
195
|
+
- Implement JWT-based authentication
|
|
196
|
+
- Add login and logout endpoints
|
|
197
|
+
- Create user session management
|
|
198
|
+
```
|
|
199
|
+
|
|
200
|
+
---
|
|
201
|
+
|
|
202
|
+
## π¨ Supported Ollama Models
|
|
203
|
+
|
|
204
|
+
Any Ollama model works, but these are recommended:
|
|
205
|
+
|
|
206
|
+
- **llama2** - Best overall performance
|
|
207
|
+
- **codellama** - Optimized for code
|
|
208
|
+
- **mistral** - Fast and efficient
|
|
209
|
+
- **phi** - Lightweight option
|
|
210
|
+
- **llama3** - Latest and most powerful
|
|
211
|
+
|
|
212
|
+
```bash
|
|
213
|
+
# Pull and use different models
|
|
214
|
+
ollama pull codellama
|
|
215
|
+
ollama pull mistral
|
|
216
|
+
ollama pull phi
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
---
|
|
220
|
+
|
|
221
|
+
## π οΈ Development
|
|
222
|
+
|
|
223
|
+
### Project Structure
|
|
224
|
+
|
|
225
|
+
```
|
|
226
|
+
ai-commit/
|
|
227
|
+
βββ ai_commit.py # Main script
|
|
228
|
+
βββ setup.py # Installation config
|
|
229
|
+
βββ requirements.txt # Dependencies
|
|
230
|
+
βββ README.md # This file
|
|
231
|
+
βββ LICENSE # MIT License
|
|
232
|
+
βββ .gitignore # Git ignore rules
|
|
233
|
+
βββ examples/ # Example usage
|
|
234
|
+
```
|
|
235
|
+
|
|
236
|
+
### Running Tests
|
|
237
|
+
|
|
238
|
+
```bash
|
|
239
|
+
# Install dev dependencies
|
|
240
|
+
pip install -e ".[dev]"
|
|
241
|
+
|
|
242
|
+
# Run tests (coming soon)
|
|
243
|
+
pytest tests/
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
---
|
|
247
|
+
|
|
248
|
+
## π€ Contributing
|
|
249
|
+
|
|
250
|
+
Contributions are what make the open-source community amazing! Any contributions you make are **greatly appreciated**.
|
|
251
|
+
|
|
252
|
+
### How to Contribute:
|
|
253
|
+
|
|
254
|
+
1. Fork the repository
|
|
255
|
+
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
|
256
|
+
3. Make your changes
|
|
257
|
+
4. Run tests (when available)
|
|
258
|
+
5. Commit using ai-commit! π
|
|
259
|
+
6. Push to your branch (`git push origin feature/amazing-feature`)
|
|
260
|
+
7. Open a Pull Request
|
|
261
|
+
|
|
262
|
+
See [CONTRIBUTING.md](CONTRIBUTING.md) for detailed guidelines.
|
|
263
|
+
|
|
264
|
+
---
|
|
265
|
+
|
|
266
|
+
## π Examples
|
|
267
|
+
|
|
268
|
+
### Example 1: Adding a New Feature
|
|
269
|
+
|
|
270
|
+
```bash
|
|
271
|
+
$ git add new_feature.py
|
|
272
|
+
$ ai-commit
|
|
273
|
+
|
|
274
|
+
Generated: feat: add user profile management feature
|
|
275
|
+
```
|
|
276
|
+
|
|
277
|
+
### Example 2: Bug Fix
|
|
278
|
+
|
|
279
|
+
```bash
|
|
280
|
+
$ git add bug_fix.py
|
|
281
|
+
$ ai-commit
|
|
282
|
+
|
|
283
|
+
Generated: fix: resolve null pointer exception in login
|
|
284
|
+
```
|
|
285
|
+
|
|
286
|
+
### Example 3: Documentation
|
|
287
|
+
|
|
288
|
+
```bash
|
|
289
|
+
$ git add README.md
|
|
290
|
+
$ ai-commit
|
|
291
|
+
|
|
292
|
+
Generated: docs: update installation instructions
|
|
293
|
+
```
|
|
294
|
+
|
|
295
|
+
---
|
|
296
|
+
|
|
297
|
+
## π Troubleshooting
|
|
298
|
+
|
|
299
|
+
### Ollama Not Running
|
|
300
|
+
```
|
|
301
|
+
Error: Cannot connect to Ollama
|
|
302
|
+
Solution: Start Ollama server with `ollama serve`
|
|
303
|
+
```
|
|
304
|
+
|
|
305
|
+
### No Staged Changes
|
|
306
|
+
```
|
|
307
|
+
Error: No staged changes found
|
|
308
|
+
Solution: Stage your changes with `git add <files>`
|
|
309
|
+
```
|
|
310
|
+
|
|
311
|
+
### Model Not Found
|
|
312
|
+
```
|
|
313
|
+
Error: Model not found
|
|
314
|
+
Solution: Pull the model with `ollama pull llama2`
|
|
315
|
+
```
|
|
316
|
+
|
|
317
|
+
---
|
|
318
|
+
|
|
319
|
+
## π License
|
|
320
|
+
|
|
321
|
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
|
322
|
+
|
|
323
|
+
---
|
|
324
|
+
|
|
325
|
+
## π Acknowledgments
|
|
326
|
+
|
|
327
|
+
- **Ollama** - For making local LLMs accessible
|
|
328
|
+
- **Git** - The best version control system
|
|
329
|
+
- **Python** - For being awesome
|
|
330
|
+
- **You** - For using this tool!
|
|
331
|
+
|
|
332
|
+
---
|
|
333
|
+
|
|
334
|
+
## πΊοΈ Roadmap
|
|
335
|
+
|
|
336
|
+
- [ ] Configuration file support (`.ai-commit.yml`)
|
|
337
|
+
- [ ] More commit message formats
|
|
338
|
+
- [ ] Interactive model selection
|
|
339
|
+
- [ ] Emoji support in commits π
|
|
340
|
+
- [ ] Multiple language support
|
|
341
|
+
- [ ] Git hooks integration
|
|
342
|
+
- [ ] VSCode extension
|
|
343
|
+
- [ ] Custom prompt templates
|
|
344
|
+
- [ ] Commit message history
|
|
345
|
+
- [ ] Auto-detect commit type from files
|
|
346
|
+
- [ ] Batch commit support
|
|
347
|
+
- [ ] Integration with GitHub CLI
|
|
348
|
+
|
|
349
|
+
---
|
|
350
|
+
|
|
351
|
+
## π¨βπ» Author
|
|
352
|
+
|
|
353
|
+
**Himanshu Kumar**
|
|
354
|
+
|
|
355
|
+
- π GitHub: [@himanshu231204](https://github.com/himanshu231204)
|
|
356
|
+
- πΌ LinkedIn: [himanshu231204](https://www.linkedin.com/in/himanshu231204)
|
|
357
|
+
- π¦ Twitter/X: [@himanshu231204](https://twitter.com/himanshu231204)
|
|
358
|
+
- π§ Email: himanshu231204@gmail.com
|
|
359
|
+
|
|
360
|
+
---
|
|
361
|
+
|
|
362
|
+
## π Support
|
|
363
|
+
|
|
364
|
+
If you find this project helpful, please consider:
|
|
365
|
+
|
|
366
|
+
- β Starring the repository
|
|
367
|
+
- π Reporting bugs
|
|
368
|
+
- π‘ Suggesting new features
|
|
369
|
+
- π Contributing code
|
|
370
|
+
- β [Buy me a coffee](https://www.buymeacoffee.com/himanshu231204)
|
|
371
|
+
- π [Sponsor on GitHub](https://github.com/sponsors/himanshu231204)
|
|
372
|
+
|
|
373
|
+
---
|
|
374
|
+
|
|
375
|
+
## π Stats
|
|
376
|
+
|
|
377
|
+

|
|
378
|
+

|
|
379
|
+

|
|
380
|
+
|
|
381
|
+
---
|
|
382
|
+
|
|
383
|
+
## π Links
|
|
384
|
+
|
|
385
|
+
- **Documentation**: [GitHub Wiki](https://github.com/himanshu231204/ai-commit/wiki)
|
|
386
|
+
- **Issues**: [GitHub Issues](https://github.com/himanshu231204/ai-commit/issues)
|
|
387
|
+
- **Discussions**: [GitHub Discussions](https://github.com/himanshu231204/ai-commit/discussions)
|
|
388
|
+
- **Releases**: [GitHub Releases](https://github.com/himanshu231204/ai-commit/releases)
|
|
389
|
+
|
|
390
|
+
---
|
|
391
|
+
|
|
392
|
+
**Made with β€οΈ by [Himanshu Kumar](https://github.com/himanshu231204)**
|
|
393
|
+
|
|
394
|
+
β **Star this repo if you find it useful!** β
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
ai_commit.py,sha256=rXgCBk-I9wNP5o_1FKgHq9Y1qxIajH0n_d8-jQffJUk,11521
|
|
2
|
+
ollama_git_commit-0.1.0.dist-info/licenses/LICENSE,sha256=6xnQqPUHIgSjn-A6ZaUfq8P8lUNZo6KIm-iPH_ovJWo,1071
|
|
3
|
+
ollama_git_commit-0.1.0.dist-info/METADATA,sha256=Nwb45XhGmW5H6VjWnmvomRQrf_m5tjtjRZEBjJT3j9Y,9353
|
|
4
|
+
ollama_git_commit-0.1.0.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
|
5
|
+
ollama_git_commit-0.1.0.dist-info/entry_points.txt,sha256=s3xBxIoK_SSyeq9e0OqCB31N77T6DPI1bXw_-8Rs1bk,45
|
|
6
|
+
ollama_git_commit-0.1.0.dist-info/top_level.txt,sha256=xzo4o4ZjM0T1PlzuuOmsa21x-DJLJKnzk4svgoYI0PM,10
|
|
7
|
+
ollama_git_commit-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Himanshu Kumar
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
ai_commit
|