devcommit 0.1.2__tar.gz → 0.1.4.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- devcommit-0.1.4.4/PKG-INFO +19 -0
- devcommit-0.1.4.4/devcommit/app/ai_providers.py +280 -0
- devcommit-0.1.4.4/devcommit/app/gemini_ai.py +91 -0
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/app/prompt.py +22 -10
- devcommit-0.1.4.4/devcommit/main.py +396 -0
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/utils/git.py +81 -7
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/utils/logger.py +16 -5
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/utils/parser.py +6 -0
- devcommit-0.1.4.4/pyproject.toml +45 -0
- devcommit-0.1.2/PKG-INFO +0 -37
- devcommit-0.1.2/README.md +0 -11
- devcommit-0.1.2/devcommit/app/gemini_ai.py +0 -68
- devcommit-0.1.2/devcommit/main.py +0 -136
- devcommit-0.1.2/pyproject.toml +0 -25
- {devcommit-0.1.2 → devcommit-0.1.4.4}/COPYING +0 -0
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/__init__.py +0 -0
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/app/__init__.py +0 -0
- {devcommit-0.1.2 → devcommit-0.1.4.4}/devcommit/utils/__init__.py +0 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: devcommit
|
|
3
|
+
Version: 0.1.4.4
|
|
4
|
+
Summary: AI-powered git commit message generator
|
|
5
|
+
License-File: COPYING
|
|
6
|
+
Author: Hordunlarmy
|
|
7
|
+
Author-email: Hordunlarmy@gmail.com
|
|
8
|
+
Requires-Python: >=3.10
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
13
|
+
Requires-Dist: anthropic (>=0.25.0)
|
|
14
|
+
Requires-Dist: google-generativeai (>=0.7.1)
|
|
15
|
+
Requires-Dist: inquirerpy (>=0.3.4)
|
|
16
|
+
Requires-Dist: openai (>=1.0.0)
|
|
17
|
+
Requires-Dist: python-decouple (>=3.8)
|
|
18
|
+
Requires-Dist: requests (>=2.31.0)
|
|
19
|
+
Requires-Dist: rich (>=13.7.1)
|
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""AI provider abstraction layer for multiple AI models"""
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
import sys
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
# Suppress stderr for all AI imports
|
|
10
|
+
_stderr = sys.stderr
|
|
11
|
+
_devnull = open(os.devnull, 'w')
|
|
12
|
+
sys.stderr = _devnull
|
|
13
|
+
|
|
14
|
+
try:
|
|
15
|
+
import google.generativeai as genai
|
|
16
|
+
except ImportError:
|
|
17
|
+
genai = None
|
|
18
|
+
|
|
19
|
+
try:
|
|
20
|
+
import openai
|
|
21
|
+
except ImportError:
|
|
22
|
+
openai = None
|
|
23
|
+
|
|
24
|
+
try:
|
|
25
|
+
import anthropic
|
|
26
|
+
except ImportError:
|
|
27
|
+
anthropic = None
|
|
28
|
+
|
|
29
|
+
try:
|
|
30
|
+
import requests
|
|
31
|
+
except ImportError:
|
|
32
|
+
requests = None
|
|
33
|
+
|
|
34
|
+
sys.stderr = _stderr
|
|
35
|
+
_devnull.close()
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class AIProvider(ABC):
    """Abstract interface that every concrete AI backend must implement."""

    @abstractmethod
    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Produce a commit message for *diff*, guided by *prompt*, capped at *max_tokens*."""
        ...
|
|
46
|
+
|
|
47
|
+
class GeminiProvider(AIProvider):
    """Google Gemini AI provider.

    Wraps a google-generativeai chat session. stderr is temporarily redirected
    to os.devnull around SDK calls to hide gRPC/ALTS noise; the redirection is
    now wrapped in try/finally so stderr is restored (and the devnull handle
    closed) even when the SDK raises — previously an exception left stderr
    pointing at devnull and leaked the file handle.
    """

    def __init__(self, api_key: str, model: str = "gemini-1.5-flash"):
        if not genai:
            raise ImportError("google-generativeai not installed. Run: pip install google-generativeai")

        # Suppress stderr during configuration; always restore it.
        _stderr = sys.stderr
        _devnull = open(os.devnull, 'w')
        sys.stderr = _devnull
        try:
            genai.configure(api_key=api_key)
        finally:
            sys.stderr = _stderr
            _devnull.close()

        self.model_name = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Send *diff* to a chat session primed with *prompt*; return the reply text.

        Returns a fallback string when the SDK response has no ``text`` attribute.
        """
        generation_config = {
            "response_mime_type": "text/plain",
            "max_output_tokens": max_tokens,
            "top_k": 40,
            "top_p": 0.9,
            "temperature": 0.7,
        }

        model = genai.GenerativeModel(
            generation_config=generation_config,
            model_name=self.model_name,
        )

        # Prime the chat with the instruction prompt; the diff is the user turn.
        chat_session = model.start_chat(
            history=[
                {
                    "role": "user",
                    "parts": [prompt],
                },
            ]
        )

        # Suppress stderr during the API call; restore it even if the call raises.
        _stderr = sys.stderr
        _devnull = open(os.devnull, 'w')
        sys.stderr = _devnull
        try:
            response = chat_session.send_message(diff)
        finally:
            sys.stderr = _stderr
            _devnull.close()

        if response and hasattr(response, "text"):
            return response.text.strip()
        return "No valid commit message generated."
98
|
+
|
|
99
|
+
|
|
100
|
+
class OpenAIProvider(AIProvider):
    """OpenAI GPT provider"""

    def __init__(self, api_key: str, model: str = "gpt-4o-mini"):
        if not openai:
            raise ImportError("openai not installed. Run: pip install openai")
        self.client = openai.OpenAI(api_key=api_key)
        self.model = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Ask the chat-completions endpoint for a commit message; return it stripped."""
        chat_messages = [
            {"role": "system", "content": prompt},
            {"role": "user", "content": diff},
        ]
        completion = self.client.chat.completions.create(
            model=self.model,
            messages=chat_messages,
            max_tokens=max_tokens,
            temperature=0.7,
        )
        return completion.choices[0].message.content.strip()
|
120
|
+
|
|
121
|
+
|
|
122
|
+
class GroqProvider(AIProvider):
    """Groq AI provider (OpenAI-compatible)"""

    def __init__(self, api_key: str, model: str = "llama-3.3-70b-versatile"):
        if not openai:
            raise ImportError("openai not installed. Run: pip install openai")
        # Groq exposes an OpenAI-compatible endpoint, so reuse the openai client.
        self.client = openai.OpenAI(
            api_key=api_key,
            base_url="https://api.groq.com/openai/v1"
        )
        self.model = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Ask Groq's chat-completions endpoint for a commit message; return it stripped."""
        conversation = [
            {"role": "system", "content": prompt},
            {"role": "user", "content": diff},
        ]
        completion = self.client.chat.completions.create(
            model=self.model,
            messages=conversation,
            max_tokens=max_tokens,
            temperature=0.7,
        )
        return completion.choices[0].message.content.strip()
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class AnthropicProvider(AIProvider):
    """Anthropic Claude provider"""

    def __init__(self, api_key: str, model: str = "claude-3-haiku-20240307"):
        if not anthropic:
            raise ImportError("anthropic not installed. Run: pip install anthropic")
        self.client = anthropic.Anthropic(api_key=api_key)
        self.model = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Send *diff* with *prompt* as the system message; return the first text block."""
        reply = self.client.messages.create(
            model=self.model,
            max_tokens=max_tokens,
            system=prompt,
            messages=[{"role": "user", "content": diff}],
        )
        return reply.content[0].text.strip()
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class OllamaProvider(AIProvider):
    """Ollama local model provider"""

    def __init__(self, base_url: str = "http://localhost:11434", model: str = "llama3"):
        if not requests:
            raise ImportError("requests not installed. Run: pip install requests")
        self.base_url = base_url.rstrip('/')
        self.model = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """POST prompt+diff to Ollama's /api/generate and return the raw reply text."""
        payload = {
            "model": self.model,
            "prompt": f"{prompt}\n\n{diff}",
            "stream": False,
            "options": {
                "temperature": 0.7,
                "num_predict": max_tokens,
            },
        }
        reply = requests.post(f"{self.base_url}/api/generate", json=payload, timeout=60)
        reply.raise_for_status()
        # Return raw result - normalization is done centrally in gemini_ai.py
        return reply.json()["response"].strip()
|
195
|
+
|
|
196
|
+
|
|
197
|
+
class CustomProvider(AIProvider):
    """Custom OpenAI-compatible API provider.

    Accepts a full chat-completions URL or a bare base URL and normalizes it
    to the ``.../v1`` base that the openai client expects.
    """

    def __init__(self, api_url: str, api_key: Optional[str] = None, model: str = "default"):
        if not openai:
            raise ImportError("openai not installed. Run: pip install openai")

        # Normalize to a base URL ending in /v1. Stripping '/chat/completions'
        # also covers '/v1/chat/completions' (the old second .replace() for the
        # longer suffix could never match after the first and was dead code).
        base_url = api_url.replace('/chat/completions', '')
        if not base_url.endswith('/v1'):
            base_url = base_url.rstrip('/') + '/v1'

        self.client = openai.OpenAI(
            # Some local servers reject an empty key, so fall back to a dummy.
            api_key=api_key or "dummy-key",
            base_url=base_url
        )
        self.model = model

    def generate_commit_message(self, diff: str, prompt: str, max_tokens: int) -> str:
        """Ask the custom endpoint for a commit message; return it stripped."""
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": prompt},
                {"role": "user", "content": diff}
            ],
            max_tokens=max_tokens,
            temperature=0.7
        )
        return response.choices[0].message.content.strip()
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def get_ai_provider(config) -> AIProvider:
    """Factory function to get the appropriate AI provider based on config"""

    provider_name = config("AI_PROVIDER", default="gemini").lower()

    def _required(key):
        # Shared guard: fetch a mandatory config value or fail loudly.
        value = config(key, default=None)
        if not value:
            raise ValueError(f"{key} not set")
        return value

    if provider_name == "gemini":
        api_key = _required("GEMINI_API_KEY")
        # Support legacy MODEL_NAME for backward compatibility
        model = config("GEMINI_MODEL", default=None) or config("MODEL_NAME", default="gemini-1.5-flash")
        return GeminiProvider(api_key, model)

    if provider_name == "openai":
        api_key = _required("OPENAI_API_KEY")
        return OpenAIProvider(api_key, config("OPENAI_MODEL", default="gpt-4o-mini"))

    if provider_name == "groq":
        api_key = _required("GROQ_API_KEY")
        return GroqProvider(api_key, config("GROQ_MODEL", default="llama-3.3-70b-versatile"))

    if provider_name == "anthropic":
        api_key = _required("ANTHROPIC_API_KEY")
        return AnthropicProvider(api_key, config("ANTHROPIC_MODEL", default="claude-3-haiku-20240307"))

    if provider_name == "ollama":
        return OllamaProvider(
            config("OLLAMA_BASE_URL", default="http://localhost:11434"),
            config("OLLAMA_MODEL", default="llama3"),
        )

    if provider_name == "custom":
        api_url = config("CUSTOM_API_URL", default=None)
        if not api_url:
            raise ValueError("CUSTOM_API_URL not set for custom provider")
        return CustomProvider(
            api_url,
            config("CUSTOM_API_KEY", default=None),
            config("CUSTOM_MODEL", default="default"),
        )

    raise ValueError(
        f"Unknown AI provider: {provider_name}. "
        f"Supported: gemini, openai, groq, anthropic, ollama, custom"
    )
|
280
|
+
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""Generate a git commit message using AI"""
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
import re
|
|
6
|
+
import sys
|
|
7
|
+
import warnings
|
|
8
|
+
|
|
9
|
+
# Suppress warnings from AI libraries
|
|
10
|
+
os.environ['GRPC_ENABLE_FORK_SUPPORT'] = '1'
|
|
11
|
+
os.environ['GRPC_VERBOSITY'] = 'ERROR'
|
|
12
|
+
os.environ['GLOG_minloglevel'] = '3'
|
|
13
|
+
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
|
|
14
|
+
|
|
15
|
+
warnings.filterwarnings('ignore', message='.*ALTS.*')
|
|
16
|
+
warnings.filterwarnings('ignore', category=UserWarning)
|
|
17
|
+
|
|
18
|
+
from devcommit.utils.logger import Logger, config
|
|
19
|
+
from .ai_providers import get_ai_provider
|
|
20
|
+
from .prompt import generate_prompt
|
|
21
|
+
|
|
22
|
+
logger_instance = Logger("__ai__")
|
|
23
|
+
logger = logger_instance.get_logger()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def normalize_commit_response(response: str) -> str:
    """Normalize AI response to ensure proper formatting of commit messages"""
    text = response.strip()

    # Strip any surrounding markdown code fence the model may have emitted.
    text = re.sub(r'^```[\w]*\n?', '', text)
    text = re.sub(r'\n?```$', '', text)
    text = text.strip()

    # Multi-line output without the '|' separator: collapse to pipe-joined form,
    # dropping fences and list markers (numbers / bullets) along the way.
    if "\n" in text and "|" not in text:
        cleaned = []
        for raw_line in text.split("\n"):
            candidate = raw_line.strip()
            if not candidate or candidate.startswith('```'):
                continue
            candidate = re.sub(r'^\d+[\.\)]\s*', '', candidate)
            candidate = re.sub(r'^[-*•]\s*', '', candidate)
            if candidate:
                cleaned.append(candidate)
        text = "|".join(cleaned)

    return text
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def generateCommitMessage(diff: str) -> str:
    """Return a generated commit message using configured AI provider"""
    # Keep the real stderr so it can be restored; AI SDKs are noisy on stderr.
    _stderr = sys.stderr
    _devnull_out = open(os.devnull, 'w')

    try:
        # Load Configuration Values
        max_no = config("MAX_NO", default=1, cast=int)
        locale = config("LOCALE", default="en-US")
        commit_type = config("COMMIT_TYPE", default="general")
        max_tokens = config("MAX_TOKENS", default=8192, cast=int)

        # Build the instruction prompt from the configured constraints.
        prompt_text = generate_prompt(max_tokens, max_no, locale, commit_type)

        # Construct the provider with stderr silenced, then restore it.
        sys.stderr = _devnull_out
        provider = get_ai_provider(config)
        sys.stderr = _stderr

        # Same dance around the actual generation call.
        sys.stderr = _devnull_out
        raw_message = provider.generate_commit_message(diff, prompt_text, max_tokens)
        sys.stderr = _stderr

        # Normalize response to handle different formatting from various providers
        return normalize_commit_response(raw_message)

    except Exception as e:
        logger.error(f"Error generating commit message: {e}")
        return f"Error generating commit message: {str(e)}"
    finally:
        # Restore stderr and close devnull
        sys.stderr = _stderr
        _devnull_out.close()
|
@@ -56,18 +56,30 @@ def specify_commit_format(commit_type: CommitType) -> str:
|
|
|
56
56
|
def generate_prompt(
    max_length: int, max_no: int, locale: str, commit_type: CommitType
) -> str:
    """Generate a detailed prompt for Gemini to create a strict Git commit message."""

    preamble = [
        "You are tasked with generating Git commit messages based solely on the following code diff.",
        "Please adhere to the following specifications meticulously:",
    ]
    # Numbered rules: 1 language, 2 count, 3 one line each, 4 separators,
    # 5 length cap, 6 no extra formatting, 7 commit-type guidance, 8 format.
    rules = [
        f"1. The language of the commit message should be: {locale}. This specifies the linguistic format of the message.",
        f"2. You MUST generate EXACTLY {max_no} commit message(s). No more, no less. This is mandatory.",
        "3. Each commit message must be succinct and limited to a single line. Do not exceed one line per message.",
        f"4. IMPORTANT: If generating {max_no} > 1 messages, separate them with '|' OR put each on a new line. Example for 3 messages:",
        " - Option 1: message1|message2|message3",
        " - Option 2: message1\\nmessage2\\nmessage3",
        f"5. Each individual commit message must not exceed {max_length} characters in length. This is a strict upper limit.",
        "6. Generate ONLY the commit message(s) as specified. Do NOT include: markdown code blocks (```), numbering, bullet points, explanations, or any formatting.",
        f"7. Refer to the following commit type specification: {commit_types[commit_type]}. This will guide the nature of the commit messages you produce.",
        f"8. Follow the specific format required for the given commit type, which is defined as follows: {specify_commit_format(commit_type)}.",
    ]

    # Return the fully constructed prompt as a single formatted string
    return "\n".join(filter(bool, preamble + rules))
|
|
@@ -0,0 +1,396 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import subprocess
|
|
3
|
+
|
|
4
|
+
# Suppress Google gRPC/ALTS warnings before any imports
|
|
5
|
+
os.environ['GRPC_VERBOSITY'] = 'ERROR'
|
|
6
|
+
os.environ['GLOG_minloglevel'] = '3'
|
|
7
|
+
os.environ['GRPC_ENABLE_FORK_SUPPORT'] = '1'
|
|
8
|
+
|
|
9
|
+
from InquirerPy import get_style, inquirer
|
|
10
|
+
from rich.console import Console
|
|
11
|
+
|
|
12
|
+
from devcommit.app.gemini_ai import generateCommitMessage
|
|
13
|
+
from devcommit.utils.git import (KnownError, assert_git_repo,
|
|
14
|
+
get_detected_message, get_diff_for_files,
|
|
15
|
+
get_staged_diff, group_files_by_directory)
|
|
16
|
+
from devcommit.utils.logger import Logger, config
|
|
17
|
+
from devcommit.utils.parser import CommitFlag, parse_arguments
|
|
18
|
+
|
|
19
|
+
logger_instance = Logger("__devcommit__")
|
|
20
|
+
logger = logger_instance.get_logger()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
# Function to check if any commits exist
|
|
24
|
+
def has_commits() -> bool:
    """Return True when the current repository has at least one commit (HEAD resolves)."""
    probe = subprocess.run(
        ["git", "rev-parse", "HEAD"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # git rev-parse HEAD exits 0 only if HEAD points at an existing commit.
    return probe.returncode == 0
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# Main function
|
|
34
|
+
def main(flags: CommitFlag = None):
    """CLI entry point: stage/detect changes, choose a commit strategy, commit.

    Args:
        flags: Parsed CLI flags; when None they are read from the command line
            via parse_arguments().
    """
    if flags is None:
        flags = parse_arguments()

    try:
        # Abort early (KnownError) when not inside a git repository.
        assert_git_repo()
        console = Console()

        # Print stylish header with gradient effect
        console.print()
        console.print("╭" + "─" * 60 + "╮", style="bold cyan")
        console.print("│" + " " * 60 + "│", style="bold cyan")
        console.print("│" + " 🚀 [bold white on cyan] DevCommit [/bold white on cyan] [bold white]AI-Powered Commit Generator[/bold white]".ljust(76) + "│", style="bold cyan")
        console.print("│" + " " * 60 + "│", style="bold cyan")
        console.print("╰" + "─" * 60 + "╯", style="bold cyan")

        # Display provider and model info
        provider = config("AI_PROVIDER", default="gemini").lower()
        model = ""

        # Resolve the model label per provider, for display only — the defaults
        # here mirror those used by get_ai_provider in ai_providers.py.
        if provider == "ollama":
            model = config("OLLAMA_MODEL", default="llama3")
        elif provider == "gemini":
            # GEMINI_MODEL preferred; legacy MODEL_NAME kept for compatibility.
            model = config("GEMINI_MODEL", default=None) or config("MODEL_NAME", default="gemini-1.5-flash")
        elif provider == "openai":
            model = config("OPENAI_MODEL", default="gpt-4o-mini")
        elif provider == "groq":
            model = config("GROQ_MODEL", default="llama-3.3-70b-versatile")
        elif provider == "anthropic":
            model = config("ANTHROPIC_MODEL", default="claude-3-haiku-20240307")
        elif provider == "custom":
            model = config("CUSTOM_MODEL", default="default")

        console.print(f"[dim]Provider:[/dim] [bold magenta]{provider}[/bold magenta] [dim]│[/dim] [dim]Model:[/dim] [bold magenta]{model}[/bold magenta]")
        console.print()

        # Optionally `git add --update` before detecting what is staged.
        if flags["stageAll"]:
            stage_changes(console)

        staged = detect_staged_files(console, flags["excludeFiles"])

        # Determine commit strategy
        # Priority: CLI flag > config (file or env) > interactive prompt
        use_per_directory = flags.get("directory", False)

        # If not explicitly set via CLI, check config (file or environment variable)
        if not use_per_directory:
            commit_mode = config("COMMIT_MODE", default="auto").lower()
            if commit_mode == "directory":
                use_per_directory = True
            elif commit_mode == "global":
                use_per_directory = False
            # If "auto" or not set, fall through to interactive prompt

        # If still not set, check if there are multiple directories and prompt
        if not use_per_directory and config("COMMIT_MODE", default="auto").lower() == "auto":
            grouped = group_files_by_directory(staged["files"])
            if len(grouped) > 1:
                use_per_directory = prompt_commit_strategy(console, grouped)

        # Dispatch to the selected strategy.
        if use_per_directory:
            process_per_directory_commits(console, staged, flags)
        else:
            process_global_commit(console, flags)

        # Print stylish completion message
        console.print()
        console.print("╭" + "─" * 60 + "╮", style="bold green")
        console.print("│" + " " * 60 + "│", style="bold green")
        console.print("│" + " ✨ [bold white]All commits completed successfully![/bold white] ✨ ".ljust(68) + "│", style="bold green")
        console.print("│" + " " * 60 + "│", style="bold green")
        console.print("╰" + "─" * 60 + "╯", style="bold green")
        console.print()

    except KeyboardInterrupt:
        # Ctrl-C: exit quietly without a traceback.
        console.print("\n\n[bold yellow]⚠️ Operation cancelled by user[/bold yellow]\n")
        return
    except KnownError as error:
        # Expected, user-facing failures (no staged changes, not a repo, ...).
        logger.error(str(error))
        console.print(f"\n[bold red]❌ Error:[/bold red] [red]{error}[/red]\n")
    except subprocess.CalledProcessError as error:
        # A git subprocess exited non-zero.
        logger.error(str(error))
        console.print(f"\n[bold red]❌ Git command failed:[/bold red] [red]{error}[/red]\n")
    except Exception as error:
        # Catch-all boundary: log and report rather than crash the CLI.
        logger.error(str(error))
        console.print(f"\n[bold red]❌ Unexpected error:[/bold red] [red]{error}[/red]\n")
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def stage_changes(console):
    """Run ``git add --update`` behind a spinner to stage tracked modifications."""
    spinner_text = "[cyan]🔄 Staging changes...[/cyan]"
    with console.status(spinner_text, spinner="dots", spinner_style="cyan"):
        subprocess.run(["git", "add", "--update"], check=True)
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def detect_staged_files(console, exclude_files):
    """Collect the staged diff, abort when nothing is staged, and list the files."""
    with console.status(
        "[cyan]🔍 Detecting staged files...[/cyan]",
        spinner="dots",
        spinner_style="cyan"
    ):
        staged = get_staged_diff(exclude_files)
        if not staged:
            raise KnownError(
                "No staged changes found. Stage your changes manually, or "
                "automatically stage all changes with the `--stageAll` flag."
            )
        console.print(
            f"\n[bold green]✅ {get_detected_message(staged['files'])}[/bold green]"
        )
        divider = "[dim]" + "─" * 60 + "[/dim]"
        console.print(divider)
        for staged_file in staged["files"]:
            console.print(f" [cyan]▸[/cyan] [white]{staged_file}[/white]")
        console.print(divider)
        return staged
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def analyze_changes(console):
    """Run the AI over the staged diff and return a list of candidate messages.

    Returns:
        list[str]: candidate commit messages (the AI joins multiple candidates
        with '|'; the string is split here).

    Raises:
        KnownError: when no staged diff exists or no messages were produced.
    """
    import sys

    with console.status(
        "[magenta]🤖 AI analyzing changes...[/magenta]",
        spinner="dots",
        spinner_style="magenta"
    ):
        # Capture the staged diff text directly from git.
        diff = subprocess.run(
            ["git", "diff", "--staged"],
            stdout=subprocess.PIPE,
            text=True,
        ).stdout

        if not diff:
            raise KnownError(
                "No diff could be generated. Ensure you have changes staged."
            )

        # Suppress stderr during AI call to hide ALTS warnings
        _stderr = sys.stderr
        _devnull = open(os.devnull, 'w')
        sys.stderr = _devnull

        try:
            commit_message = generateCommitMessage(diff)
        finally:
            # Always restore stderr and release the devnull handle.
            sys.stderr = _stderr
            _devnull.close()

    # Normalized responses use '|' as the candidate separator; split into a list.
    if isinstance(commit_message, str):
        commit_message = commit_message.split("|")

    if not commit_message:
        raise KnownError("No commit messages were generated. Try again.")

    return commit_message
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def prompt_commit_message(console, commit_message):
    """Show generated messages in a fuzzy picker; return the choice or None.

    Args:
        console: rich Console used for output.
        commit_message: list of candidate message strings (or pre-built choices).

    Returns:
        The selected message string, or None when the user cancels.
    """
    # Singular vs plural wording depending on how many candidates there are.
    tag = (
        "Select commit message"
        if len(commit_message) > 1
        else "Confirm commit message"
    )
    style = get_style({
        "question": "#00d7ff bold",
        "questionmark": "#00d7ff bold",
        "pointer": "#00d7ff bold",
        "instruction": "#7f7f7f",
        "answer": "#00d7ff bold",
        "fuzzy_info": ""  # Hide the counter
    }, style_override=False)

    console.print()
    console.print("[bold cyan]📝 Generated Commit Messages:[/bold cyan]")
    console.print()

    # Add numbered options (plain text since InquirerPy doesn't support ANSI in choices)
    numbered_choices = []
    for idx, msg in enumerate(commit_message, 1):
        if isinstance(msg, str):
            numbered_choices.append({"name": f" {idx}. {msg}", "value": msg})
        else:
            # Already a choice dict (or similar) — pass it through untouched.
            numbered_choices.append(msg)

    choices = [
        *numbered_choices,
        {"name": " ❌ [Cancel]", "value": "cancel"}
    ]

    action = inquirer.fuzzy(
        message=tag,
        style=style,
        choices=choices,
        default=None,
        instruction="(Type to filter or use arrows)",
        qmark="❯",
        info=False  # Disable info/counter
    ).execute()

    if action == "cancel":
        console.print("\n[bold yellow]⚠️ Commit cancelled[/bold yellow]\n")
        return None
    return action
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def commit_changes(console, commit, raw_argv):
    """Create the commit with the selected message, forwarding extra git args.

    Args:
        console: rich Console used for output.
        commit: the commit message string.
        raw_argv: extra arguments passed through to ``git commit``.

    Raises:
        subprocess.CalledProcessError: when ``git commit`` exits non-zero
            (caught and reported by main()).
    """
    # check=True fixes a bug where a failed `git commit` (e.g. rejected by a
    # hook) still printed the success banner; failure now propagates to
    # main()'s existing CalledProcessError handler instead.
    subprocess.run(["git", "commit", "-m", commit, *raw_argv], check=True)
    console.print("\n[bold green]✅ Committed successfully![/bold green]")
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def prompt_commit_strategy(console, grouped):
    """Prompt user to choose between global or directory-based commits."""
    console.print()
    console.print("╭" + "─" * 60 + "╮", style="bold yellow")
    console.print("│" + " 📂 [bold white]Multiple directories detected[/bold white]".ljust(70) + "│", style="bold yellow")
    console.print("╰" + "─" * 60 + "╯", style="bold yellow")
    console.print()

    # List each directory with its file count so the user can judge the split.
    for directory, files in grouped.items():
        console.print(f" [yellow]▸[/yellow] [bold white]{directory}[/bold white] [dim]({len(files)} file(s))[/dim]")
    console.print()

    style = get_style({
        "question": "#00d7ff bold",
        "questionmark": "#00d7ff bold",
        "pointer": "#00d7ff bold",
        "instruction": "#7f7f7f",
        "answer": "#00d7ff bold"
    }, style_override=False)

    # True -> separate commit per directory; False -> one global commit.
    strategy = inquirer.select(
        message="Commit strategy",
        style=style,
        choices=[
            {"name": " 🌐 One commit for all changes", "value": False},
            {"name": " 📁 Separate commits per directory", "value": True},
        ],
        default=None,
        instruction="(Use arrow keys)",
        qmark="❯"
    ).execute()

    return strategy
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def process_global_commit(console, flags):
    """Process a single global commit for all changes."""
    candidates = analyze_changes(console)
    chosen = prompt_commit_message(console, candidates)
    if chosen:
        commit_changes(console, chosen, flags["rawArgv"])
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def process_per_directory_commits(console, staged, flags):
    """Create one commit per root directory of the staged files.

    Groups the staged files by their first path component, lets the user
    choose which directories to commit, generates an AI commit message per
    directory, and commits only that directory's files.

    Args:
        console: rich ``Console`` used for all terminal output.
        staged: result of ``get_staged_diff()``; only ``staged["files"]``
            is read here.
        flags: parsed CommitFlag mapping; ``excludeFiles`` and ``rawArgv``
            are read.
    """
    # Hoisted out of the per-directory loop (it was re-imported on every
    # iteration in the previous revision).
    import sys

    grouped = group_files_by_directory(staged["files"])

    console.print()
    console.print("╭" + "─" * 60 + "╮", style="bold magenta")
    console.print("│" + f" 🔮 [bold white]Processing {len(grouped)} directories[/bold white]".ljust(71) + "│", style="bold magenta")
    console.print("╰" + "─" * 60 + "╯", style="bold magenta")
    console.print()

    # Ask if user wants to commit all or select specific directories
    style = get_style({
        "question": "#00d7ff bold",
        "questionmark": "#00d7ff bold",
        "pointer": "#00d7ff bold",
        "instruction": "#7f7f7f",
        "answer": "#00d7ff bold",
        "checkbox": "#00d7ff bold"
    }, style_override=False)

    if len(grouped) > 1:
        commit_all = inquirer.confirm(
            message="Commit all directories?",
            style=style,
            default=True,
            instruction="(y/n)",
            qmark="❯"
        ).execute()

        if commit_all:
            selected_directories = list(grouped.keys())
        else:
            # Let user select which directories to commit
            directory_choices = [
                {"name": f"{directory} ({len(files)} file(s))", "value": directory}
                for directory, files in grouped.items()
            ]

            selected_directories = inquirer.checkbox(
                message="Select directories to commit",
                style=style,
                choices=directory_choices,
                default=list(grouped.keys()),
                instruction="(Space to select, Enter to confirm)",
                qmark="❯"
            ).execute()
    else:
        selected_directories = list(grouped.keys())

    if not selected_directories:
        console.print("\n[bold yellow]⚠️ No directories selected[/bold yellow]\n")
        return

    # Process each selected directory
    for idx, directory in enumerate(selected_directories, 1):
        files = grouped[directory]
        console.print()
        console.print("┌" + "─" * 60 + "┐", style="bold cyan")
        console.print("│" + f" 📂 [{idx}/{len(selected_directories)}] [bold white]{directory}[/bold white]".ljust(69) + "│", style="bold cyan")
        console.print("└" + "─" * 60 + "┘", style="bold cyan")
        console.print()

        for file in files:
            console.print(f" [cyan]▸[/cyan] [white]{file}[/white]")

        # Get diff for this directory's files
        with console.status(
            f"[magenta]🤖 Analyzing {directory}...[/magenta]",
            spinner="dots",
            spinner_style="magenta"
        ):
            diff = get_diff_for_files(files, flags["excludeFiles"])

        if not diff:
            console.print(f"\n[bold yellow]⚠️ No diff for {directory}, skipping[/bold yellow]\n")
            continue

        # Suppress stderr during AI call to hide ALTS warnings.
        # `with` guarantees the devnull handle is closed even if the AI call
        # raises (the old manual open/close pair is gone).
        with open(os.devnull, 'w') as _devnull:
            _stderr = sys.stderr
            sys.stderr = _devnull
            try:
                commit_message = generateCommitMessage(diff)
            finally:
                sys.stderr = _stderr

        # The AI returns alternatives as a single '|'-separated string.
        if isinstance(commit_message, str):
            commit_message = commit_message.split("|")

        if not commit_message:
            console.print(f"\n[bold yellow]⚠️ No commit message generated for {directory}, skipping[/bold yellow]\n")
            continue

        # Prompt for commit message selection
        selected_commit = prompt_commit_message(console, commit_message)

        if selected_commit:
            # Commit only the files in this directory
            subprocess.run(["git", "commit", "-m", selected_commit, *flags["rawArgv"], "--"] + files)
            console.print(f"\n[bold green]✅ Committed {directory}[/bold green]")
        else:
            console.print(f"\n[bold yellow]⊘ Skipped {directory}[/bold yellow]")
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
# Allow running this module directly (e.g. `python -m devcommit.main`).
if __name__ == "__main__":
    main()
|
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
2
|
"""Git utilities"""
|
|
3
3
|
|
|
4
|
+
import os
|
|
4
5
|
import subprocess
|
|
5
|
-
from
|
|
6
|
+
from collections import defaultdict
|
|
7
|
+
from typing import Dict, List, Optional
|
|
6
8
|
|
|
7
9
|
|
|
8
10
|
class KnownError(Exception):
|
|
@@ -36,12 +38,35 @@ def exclude_from_diff(path: str) -> str:
|
|
|
36
38
|
return f':(exclude){path}'
|
|
37
39
|
|
|
38
40
|
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
41
|
+
def get_default_excludes() -> List[str]:
    """
    Get list of files to exclude from diff.

    Priority: the ``EXCLUDE_FILES`` config value (comma-separated paths or
    patterns) wins; otherwise a built-in set of lockfile patterns is used.

    Returns:
        Non-empty list of exclusion patterns.
    """
    try:
        from devcommit.utils.logger import config

        # Get from config (supports comma-separated list)
        exclude_config = config("EXCLUDE_FILES", default="")

        if exclude_config:
            # Parse comma-separated values and strip whitespace
            config_excludes = [f.strip() for f in exclude_config.split(",") if f.strip()]
            return config_excludes
    except Exception:
        # Config module unavailable or lookup failed — fall back to defaults.
        # (Previously a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.)
        pass

    # Default exclusions
    return [
        'package-lock.json',
        'pnpm-lock.yaml',
        'yarn.lock',
        '*.lock'
    ]


# Get default files to exclude (can be overridden via config)
files_to_exclude = get_default_excludes()
|
|
45
70
|
|
|
46
71
|
|
|
47
72
|
def get_staged_diff(
|
|
@@ -94,3 +119,52 @@ def get_detected_message(files: List[str]) -> str:
|
|
|
94
119
|
return (
|
|
95
120
|
f"Detected {len(files):,} staged file{'s' if len(files) > 1 else ''}"
|
|
96
121
|
)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def group_files_by_directory(files: List[str]) -> Dict[str, List[str]]:
    """
    Groups files by their root directory (first-level directory).

    Files in the repository root are grouped under 'root'.

    Git reports paths with forward slashes on every platform, so the split
    is done on '/' rather than ``os.sep`` — the previous ``os.sep``-based
    split put every file under 'root' on Windows, where the separator is a
    backslash.

    Args:
        files: Repository-relative paths as emitted by git.

    Returns:
        Mapping of first path component (or 'root') to the files under it,
        preserving the input order within each group.
    """
    grouped = defaultdict(list)

    for file_path in files:
        # The first path component identifies the top-level directory.
        parts = file_path.split('/')
        if len(parts) > 1:
            root_dir = parts[0]
        else:
            root_dir = 'root'
        grouped[root_dir].append(file_path)

    return dict(grouped)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def get_diff_for_files(files: List[str], exclude_files: Optional[List[str]] = None) -> str:
    """
    Gets the diff for specific files.

    Drops any path matching the module-level exclusion patterns plus the
    caller-supplied ones, then runs ``git diff --cached`` over what remains.
    Returns an empty string when nothing is left to diff or git fails.
    """
    extra_excludes = exclude_files or []
    all_excluded = files_to_exclude + extra_excludes

    def _is_excluded(path: str, pattern: str) -> bool:
        # Suffix match with '*' stripped (e.g. '*.lock' -> endswith '.lock'),
        # or substring match with any ':(exclude)' prefix removed.
        return (path.endswith(pattern.replace('*', ''))
                or pattern.replace(':(exclude)', '') in path)

    filtered_files = [
        path for path in files
        if not any(_is_excluded(path, pattern) for pattern in all_excluded)
    ]

    if not filtered_files:
        return ""

    try:
        result = subprocess.run(
            ['git', 'diff', '--cached', '--diff-algorithm=minimal', '--'] + filtered_files,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )
    except subprocess.CalledProcessError:
        return ""
    return result.stdout.strip()
|
|
@@ -1,12 +1,23 @@
|
|
|
1
1
|
import logging
import os

from decouple import Config, RepositoryEnv, AutoConfig

# Determine config file path: a per-virtualenv config is preferred when a
# virtualenv is active, otherwise a dotfile in the user's home directory.
if "VIRTUAL_ENV" in os.environ:
    config_path = os.path.join(os.environ["VIRTUAL_ENV"], "config", ".dcommit")
else:
    config_path = os.path.expanduser("~/.dcommit")

# Create config with priority: .dcommit file > environment variables > defaults
if os.path.isfile(config_path):
    # Use file-based config with environment variable fallback
    config = Config(RepositoryEnv(config_path))
    # NOTE(review): this print runs at import time and goes to stdout of a
    # CLI whose output may be piped — consider routing through a logger.
    print("Configuration loaded from:", config_path)
else:
    # Use environment variables only
    config = AutoConfig()
    print("Using environment variables and defaults (no .dcommit file found)")
|
|
10
21
|
|
|
11
22
|
|
|
12
23
|
class Logger:
|
|
@@ -15,7 +26,7 @@ class Logger:
|
|
|
15
26
|
def __init__(
|
|
16
27
|
self,
|
|
17
28
|
logger_name: str,
|
|
18
|
-
log_file: str = "
|
|
29
|
+
log_file: str = "devcommits.logs",
|
|
19
30
|
log_level: int = logging.DEBUG,
|
|
20
31
|
):
|
|
21
32
|
self.logger = logging.getLogger(logger_name)
|
|
@@ -8,6 +8,7 @@ class CommitFlag(TypedDict):
|
|
|
8
8
|
excludeFiles: List[str]
|
|
9
9
|
stageAll: bool
|
|
10
10
|
commitType: Optional[str]
|
|
11
|
+
directory: bool
|
|
11
12
|
rawArgv: List[str]
|
|
12
13
|
|
|
13
14
|
|
|
@@ -37,6 +38,10 @@ def parse_arguments() -> CommitFlag:
|
|
|
37
38
|
parser.add_argument(
|
|
38
39
|
"--commitType", "-t", type=str, default=None, help="Type of commit"
|
|
39
40
|
)
|
|
41
|
+
parser.add_argument(
|
|
42
|
+
"--directory", "-d", action="store_true",
|
|
43
|
+
help="Generate separate commits per root directory"
|
|
44
|
+
)
|
|
40
45
|
parser.add_argument(
|
|
41
46
|
"rawArgv", nargs="*", help="Additional arguments for git commit"
|
|
42
47
|
)
|
|
@@ -48,5 +53,6 @@ def parse_arguments() -> CommitFlag:
|
|
|
48
53
|
excludeFiles=args.excludeFiles,
|
|
49
54
|
stageAll=args.stageAll,
|
|
50
55
|
commitType=args.commitType,
|
|
56
|
+
directory=args.directory,
|
|
51
57
|
rawArgv=args.rawArgv,
|
|
52
58
|
)
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
# PEP 621 metadata — consumed by pip and standards-based build frontends.
[project]
name = "devcommit"
version = "0.1.4.4"
description = "AI-powered git commit message generator"
authors = [
    {name = "Hordunlarmy", email = "Hordunlarmy@gmail.com"}
]
requires-python = ">=3.10"
dependencies = [
    "inquirerpy>=0.3.4",
    "google-generativeai>=0.7.1",
    "openai>=1.0.0",
    "anthropic>=0.25.0",
    "rich>=13.7.1",
    "python-decouple>=3.8",
    "requests>=2.31.0",
]

[project.optional-dependencies]

[project.scripts]
devcommit = "devcommit.main:main"
create-dcommit = "scripts.create_dcommit:create_dcommit"

# Poetry-specific metadata. NOTE(review): this duplicates the [project]
# table, and the dependency constraints diverge (loose >= ranges above vs
# exact pins below, e.g. anthropic >=0.25.0 vs 0.69.0) — confirm which set
# the poetry-core build backend actually uses and keep them in sync.
[tool.poetry]
name = "devcommit"
version = "0.1.4.4"
description = "AI-powered git commit message generator"
authors = ["Hordunlarmy <Hordunlarmy@gmail.com>"]

[tool.poetry.dependencies]
python = ">=3.10,<3.13"
inquirerpy = "0.3.4"
google-generativeai = "0.8.5"
openai = "2.3.0"
anthropic = "0.69.0"
rich = "14.2.0"
python-decouple = "3.8"
requests = "2.32.5"

[tool.poetry.group.dev.dependencies]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
|
devcommit-0.1.2/PKG-INFO
DELETED
|
@@ -1,37 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.1
|
|
2
|
-
Name: devcommit
|
|
3
|
-
Version: 0.1.2
|
|
4
|
-
Summary: A command-line AI tool for autocommits
|
|
5
|
-
Home-page: https://github.com/hordunlarmy/DevCommit
|
|
6
|
-
License: MIT
|
|
7
|
-
Author: HordunTech
|
|
8
|
-
Author-email: horduntech@gmail.com
|
|
9
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
-
Classifier: Programming Language :: Python :: 2
|
|
11
|
-
Classifier: Programming Language :: Python :: 2.7
|
|
12
|
-
Classifier: Programming Language :: Python :: 3
|
|
13
|
-
Classifier: Programming Language :: Python :: 3.4
|
|
14
|
-
Classifier: Programming Language :: Python :: 3.5
|
|
15
|
-
Classifier: Programming Language :: Python :: 3.6
|
|
16
|
-
Classifier: Programming Language :: Python :: 3.7
|
|
17
|
-
Classifier: Programming Language :: Python :: 3.8
|
|
18
|
-
Classifier: Programming Language :: Python :: 3.9
|
|
19
|
-
Classifier: Programming Language :: Python :: 3.10
|
|
20
|
-
Classifier: Programming Language :: Python :: 3.11
|
|
21
|
-
Classifier: Programming Language :: Python :: 3.12
|
|
22
|
-
Classifier: Programming Language :: Python :: 3.13
|
|
23
|
-
Project-URL: Repository, https://github.com/hordunlarmy/DevCommit
|
|
24
|
-
Description-Content-Type: text/markdown
|
|
25
|
-
|
|
26
|
-
# DevCommit
|
|
27
|
-
|
|
28
|
-
A command-line AI tool for autocommits.
|
|
29
|
-
|
|
30
|
-
## Features
|
|
31
|
-
|
|
32
|
-
- Automatic commit generation using AI.
|
|
33
|
-
- Easy integration with your Git workflow.
|
|
34
|
-
- Customizable options for commit messages.
|
|
35
|
-
|
|
36
|
-

|
|
37
|
-
|
devcommit-0.1.2/README.md
DELETED
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
# DevCommit
|
|
2
|
-
|
|
3
|
-
A command-line AI tool for autocommits.
|
|
4
|
-
|
|
5
|
-
## Features
|
|
6
|
-
|
|
7
|
-
- Automatic commit generation using AI.
|
|
8
|
-
- Easy integration with your Git workflow.
|
|
9
|
-
- Customizable options for commit messages.
|
|
10
|
-
|
|
11
|
-

|
|
@@ -1,68 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python
|
|
2
|
-
"""Generate a git commit message using Gemini AI"""
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
import google.generativeai as genai
|
|
6
|
-
|
|
7
|
-
from devcommit.utils.logger import Logger, config
|
|
8
|
-
|
|
9
|
-
from .prompt import generate_prompt
|
|
10
|
-
|
|
11
|
-
logger_instance = Logger("__gemini_ai__")
|
|
12
|
-
logger = logger_instance.get_logger()
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
def generateCommitMessage(diff: str) -> str:
|
|
16
|
-
"""Return a generated commit message using Gemini AI"""
|
|
17
|
-
try:
|
|
18
|
-
# Configure API Key
|
|
19
|
-
api_key = config("GEMINI_API_KEY")
|
|
20
|
-
if not api_key:
|
|
21
|
-
raise ValueError("GEMINI_API_KEY not set.")
|
|
22
|
-
genai.configure(api_key=api_key)
|
|
23
|
-
|
|
24
|
-
# Load Configuration Values
|
|
25
|
-
max_no = config("MAX_NO", default=1)
|
|
26
|
-
locale = config("LOCALE", default="en-US")
|
|
27
|
-
commit_type = config("COMMIT_TYPE", default="general")
|
|
28
|
-
model_name = config("MODEL_NAME", default="gemini-1.5-flash")
|
|
29
|
-
if not model_name:
|
|
30
|
-
raise ValueError("MODEL_NAME not set.")
|
|
31
|
-
|
|
32
|
-
generation_config = {
|
|
33
|
-
"response_mime_type": "text/plain",
|
|
34
|
-
"max_output_tokens": 8192,
|
|
35
|
-
"top_k": 64,
|
|
36
|
-
"top_p": 0.95,
|
|
37
|
-
"temperature": 0.7,
|
|
38
|
-
}
|
|
39
|
-
|
|
40
|
-
# Create Model and Start Chat
|
|
41
|
-
model = genai.GenerativeModel(
|
|
42
|
-
generation_config=generation_config,
|
|
43
|
-
model_name=model_name,
|
|
44
|
-
)
|
|
45
|
-
|
|
46
|
-
prompt_text = generate_prompt(8192, max_no, locale, commit_type)
|
|
47
|
-
# logger.info(f"Prompt: {prompt_text}")
|
|
48
|
-
chat_session = model.start_chat(
|
|
49
|
-
history=[
|
|
50
|
-
{
|
|
51
|
-
"role": "user",
|
|
52
|
-
"parts": [prompt_text],
|
|
53
|
-
},
|
|
54
|
-
]
|
|
55
|
-
)
|
|
56
|
-
|
|
57
|
-
# Send the Diff as Message
|
|
58
|
-
# logger.info(f"Diff: {diff}")
|
|
59
|
-
response = chat_session.send_message(diff)
|
|
60
|
-
if response and hasattr(response, "text"):
|
|
61
|
-
return response.text.strip()
|
|
62
|
-
else:
|
|
63
|
-
logger.error("No valid response received from Gemini AI.")
|
|
64
|
-
return "No valid commit message generated."
|
|
65
|
-
|
|
66
|
-
except Exception as e:
|
|
67
|
-
logger.error(f"Error generating commit message: {e}")
|
|
68
|
-
return f"Error generating commit message: {str(e)}"
|
|
@@ -1,136 +0,0 @@
|
|
|
1
|
-
import subprocess
|
|
2
|
-
|
|
3
|
-
from InquirerPy import get_style, inquirer
|
|
4
|
-
from rich.console import Console
|
|
5
|
-
|
|
6
|
-
from devcommit.app.gemini_ai import generateCommitMessage
|
|
7
|
-
from devcommit.utils.git import (KnownError, assert_git_repo,
|
|
8
|
-
get_detected_message, get_staged_diff)
|
|
9
|
-
from devcommit.utils.logger import Logger
|
|
10
|
-
from devcommit.utils.parser import CommitFlag, parse_arguments
|
|
11
|
-
|
|
12
|
-
logger_instance = Logger("__devcommit__")
|
|
13
|
-
logger = logger_instance.get_logger()
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
# Function to check if any commits exist
|
|
17
|
-
def has_commits() -> bool:
|
|
18
|
-
result = subprocess.run(
|
|
19
|
-
["git", "rev-parse", "HEAD"],
|
|
20
|
-
stdout=subprocess.PIPE,
|
|
21
|
-
stderr=subprocess.PIPE,
|
|
22
|
-
)
|
|
23
|
-
return result.returncode == 0
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
# Main function
|
|
27
|
-
def main(flags: CommitFlag = None):
|
|
28
|
-
if flags is None:
|
|
29
|
-
flags = parse_arguments()
|
|
30
|
-
|
|
31
|
-
try:
|
|
32
|
-
assert_git_repo()
|
|
33
|
-
console = Console()
|
|
34
|
-
|
|
35
|
-
if flags["stageAll"]:
|
|
36
|
-
stage_changes(console)
|
|
37
|
-
|
|
38
|
-
detect_staged_files(console, flags["excludeFiles"])
|
|
39
|
-
commit_message = analyze_changes(console)
|
|
40
|
-
|
|
41
|
-
selected_commit = prompt_commit_message(console, commit_message)
|
|
42
|
-
if selected_commit:
|
|
43
|
-
commit_changes(console, selected_commit, flags["rawArgv"])
|
|
44
|
-
|
|
45
|
-
except KnownError as error:
|
|
46
|
-
logger.error(str(error))
|
|
47
|
-
console.print(f"[bold red]✖ {error}[/bold red]")
|
|
48
|
-
except subprocess.CalledProcessError as error:
|
|
49
|
-
logger.error(str(error))
|
|
50
|
-
console.print(f"[bold red]✖ Git command failed: {error}[/bold red]")
|
|
51
|
-
except Exception as error:
|
|
52
|
-
logger.error(str(error))
|
|
53
|
-
console.print(f"[bold red]✖ {error}[/bold red]")
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
def stage_changes(console):
|
|
57
|
-
with console.status(
|
|
58
|
-
"[bold green]Staging all changes...[/bold green]",
|
|
59
|
-
spinner="dots",
|
|
60
|
-
):
|
|
61
|
-
subprocess.run(["git", "add", "--update"], check=True)
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
def detect_staged_files(console, exclude_files):
|
|
65
|
-
with console.status(
|
|
66
|
-
"[bold green]Detecting staged files...[/bold green]",
|
|
67
|
-
spinner="dots",
|
|
68
|
-
):
|
|
69
|
-
staged = get_staged_diff(exclude_files)
|
|
70
|
-
if not staged:
|
|
71
|
-
raise KnownError(
|
|
72
|
-
"No staged changes found. Stage your changes manually, or "
|
|
73
|
-
"automatically stage all changes with the `--stageAll` flag."
|
|
74
|
-
)
|
|
75
|
-
console.print(
|
|
76
|
-
f"[bold green]{get_detected_message(staged['files'])}:"
|
|
77
|
-
f"[/bold green]"
|
|
78
|
-
)
|
|
79
|
-
for file in staged["files"]:
|
|
80
|
-
console.print(f" - {file}")
|
|
81
|
-
return staged
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def analyze_changes(console):
|
|
85
|
-
with console.status(
|
|
86
|
-
"[bold green]The AI is analyzing your changes...[/bold green]",
|
|
87
|
-
spinner="dots",
|
|
88
|
-
):
|
|
89
|
-
diff = subprocess.run(
|
|
90
|
-
["git", "diff", "--staged"],
|
|
91
|
-
stdout=subprocess.PIPE,
|
|
92
|
-
text=True,
|
|
93
|
-
).stdout
|
|
94
|
-
|
|
95
|
-
if not diff:
|
|
96
|
-
raise KnownError(
|
|
97
|
-
"No diff could be generated. Ensure you have changes staged."
|
|
98
|
-
)
|
|
99
|
-
|
|
100
|
-
commit_message = generateCommitMessage(diff)
|
|
101
|
-
if isinstance(commit_message, str):
|
|
102
|
-
commit_message = commit_message.split("|")
|
|
103
|
-
|
|
104
|
-
if not commit_message:
|
|
105
|
-
raise KnownError("No commit messages were generated. Try again.")
|
|
106
|
-
|
|
107
|
-
return commit_message
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
def prompt_commit_message(console, commit_message):
|
|
111
|
-
tag = (
|
|
112
|
-
"Use This Commit Message? "
|
|
113
|
-
if len(commit_message) == 1
|
|
114
|
-
else "Select A Commit Message:"
|
|
115
|
-
)
|
|
116
|
-
style = get_style({"instruction": "#abb2bf"}, style_override=False)
|
|
117
|
-
action = inquirer.fuzzy(
|
|
118
|
-
message=tag,
|
|
119
|
-
style=style,
|
|
120
|
-
choices=[*commit_message, "cancel"],
|
|
121
|
-
default=None,
|
|
122
|
-
).execute()
|
|
123
|
-
|
|
124
|
-
if action == "cancel":
|
|
125
|
-
console.print("[bold red]Commit cancelled[/bold red]")
|
|
126
|
-
return None
|
|
127
|
-
return action
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
def commit_changes(console, commit, raw_argv):
|
|
131
|
-
subprocess.run(["git", "commit", "-m", commit, *raw_argv])
|
|
132
|
-
console.print("[bold green]✔ Successfully committed![/bold green]")
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
if __name__ == "__main__":
|
|
136
|
-
main()
|
devcommit-0.1.2/pyproject.toml
DELETED
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
[tool.poetry]
|
|
2
|
-
name = "devcommit"
|
|
3
|
-
version = "0.1.2"
|
|
4
|
-
description = "A command-line AI tool for autocommits"
|
|
5
|
-
authors = ["HordunTech <horduntech@gmail.com>"]
|
|
6
|
-
license = "MIT"
|
|
7
|
-
readme = "README.md"
|
|
8
|
-
homepage = "https://github.com/hordunlarmy/DevCommit"
|
|
9
|
-
repository = "https://github.com/hordunlarmy/DevCommit"
|
|
10
|
-
|
|
11
|
-
[project.dependencies]
|
|
12
|
-
python = "^3.7"
|
|
13
|
-
inquirerpy = "^0.3.4"
|
|
14
|
-
google-generativeai = "^0.7.1"
|
|
15
|
-
rich = "^13.7.1"
|
|
16
|
-
python-decouple = "^3.8"
|
|
17
|
-
|
|
18
|
-
[project.scripts]
|
|
19
|
-
devcommit = "devcommit.main:main"
|
|
20
|
-
create-dcommit = "scripts.create_dcommit:create_dcommit"
|
|
21
|
-
|
|
22
|
-
[build-system]
|
|
23
|
-
requires = ["poetry-core>=1.0.0"]
|
|
24
|
-
build-backend = "poetry.core.masonry.api"
|
|
25
|
-
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|