enhanced-git 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- enhanced_git-1.0.0.dist-info/METADATA +349 -0
- enhanced_git-1.0.0.dist-info/RECORD +18 -0
- enhanced_git-1.0.0.dist-info/WHEEL +4 -0
- enhanced_git-1.0.0.dist-info/entry_points.txt +2 -0
- enhanced_git-1.0.0.dist-info/licenses/LICENSE +21 -0
- gitai/__init__.py +3 -0
- gitai/changelog.py +251 -0
- gitai/cli.py +166 -0
- gitai/commit.py +338 -0
- gitai/config.py +120 -0
- gitai/constants.py +134 -0
- gitai/diff.py +167 -0
- gitai/hook.py +81 -0
- gitai/providers/__init__.py +1 -0
- gitai/providers/base.py +71 -0
- gitai/providers/ollama_provider.py +86 -0
- gitai/providers/openai_provider.py +78 -0
- gitai/util.py +137 -0
gitai/cli.py
ADDED
@@ -0,0 +1,166 @@
|
|
1
|
+
"""Command-line interface for GitAI."""
|
2
|
+
|
3
|
+
from pathlib import Path
|
4
|
+
from typing import Annotated, Optional
|
5
|
+
|
6
|
+
import typer
|
7
|
+
from rich.console import Console
|
8
|
+
|
9
|
+
from .changelog import ChangelogGenerator
|
10
|
+
from .commit import CommitGenerator
|
11
|
+
from .config import Config
|
12
|
+
from .diff import get_staged_diff
|
13
|
+
from .hook import install_commit_msg_hook, uninstall_commit_msg_hook
|
14
|
+
from .util import exit_with_error, print_info, print_success
|
15
|
+
|
16
|
+
# Top-level Typer application; commands below register themselves via
# @app.command() and the group callback via @app.callback().
app = typer.Typer(
    name="git-ai",
    help="Generate Conventional Commit messages and changelog sections using AI",
    add_completion=False,
)

# Shared Rich console for styled terminal output.
console = Console()
|
23
|
+
|
24
|
+
|
25
|
+
@app.command()
def commit(
    hook: Annotated[
        Optional[str],
        typer.Option(
            "--hook",
            help="Path to COMMIT_EDITMSG file (used by Git hook)",
        ),
    ] = None,
    dry_run: Annotated[
        bool,
        typer.Option("--dry-run", help="Preview commit message without committing"),
    ] = False,
    no_body: Annotated[
        bool,
        typer.Option("--no-body", help="Generate subject line only"),
    ] = False,
    style: Annotated[
        Optional[str],
        typer.Option(
            "--style",
            help="Commit style: conventional or plain",
        ),
    ] = None,
) -> None:
    """Generate a commit message from staged changes."""
    try:
        cfg = Config.load()
        staged = get_staged_diff()

        # Nothing staged means nothing to describe -- bail out with guidance.
        if staged.is_empty():
            exit_with_error(
                "No staged changes found. Did you forget to run 'git add'?\n"
                "Use 'git status' to see unstaged changes."
            )

        message = CommitGenerator(cfg).generate_commit_message(
            diff=staged,
            hook_path=Path(hook) if hook else None,
            dry_run=dry_run,
            no_body=no_body,
            style=style,
        )

        if hook:
            # Hook mode: persist the message into the COMMIT_EDITMSG file.
            hook_path = Path(hook)
            hook_path.write_text(message + "\n")
            print_success(f"Generated commit message written to {hook_path}")
        elif dry_run:
            print_info("Generated commit message (dry run):")
            console.print(message)
        else:
            # Bare stdout so `git commit -m "$(gitai commit)"` composes cleanly.
            print(message)

    except KeyboardInterrupt:
        exit_with_error("Operation cancelled")
    except Exception as e:
        exit_with_error(f"Error generating commit message: {e}")
|
86
|
+
|
87
|
+
|
88
|
+
@app.command()
def hook(
    action: Annotated[
        str,
        typer.Argument(help="Action to perform: install or uninstall"),
    ],
    force: Annotated[
        bool,
        typer.Option("--force", help="Force overwrite existing hook"),
    ] = False,
) -> None:
    """Install or uninstall Git hooks."""
    # Guard-clause dispatch: handle each known action and return early.
    if action == "install":
        install_commit_msg_hook(force=force)
        return
    if action == "uninstall":
        uninstall_commit_msg_hook()
        return
    exit_with_error("Invalid action. Use 'install' or 'uninstall'")
|
106
|
+
|
107
|
+
|
108
|
+
@app.command()
def changelog(
    since: Annotated[
        str,
        typer.Option("--since", help="Starting reference (tag or commit)"),
    ],
    to: Annotated[
        str,
        typer.Option(
            "--to",
            help="Ending reference (defaults to HEAD)",
        ),
    ] = "HEAD",
    version: Annotated[
        Optional[str],
        typer.Option(
            "--version",
            help="Version for changelog section header",
        ),
    ] = None,
    output: Annotated[
        Optional[str],
        typer.Option(
            "--output",
            help="Output file path (defaults to CHANGELOG.md)",
        ),
    ] = None,
) -> None:
    """Generate changelog section from commit history."""
    # Typer requires the option, but keep the explicit guard for empty values.
    if not since:
        exit_with_error("--since is required")

    try:
        cfg = Config.load()
        section = ChangelogGenerator(cfg).generate_changelog(
            since_ref=since,
            to_ref=to,
            version=version,
            output_path=Path(output) if output else None,
        )
        print_success(section)

    except KeyboardInterrupt:
        exit_with_error("Operation cancelled")
    except Exception as e:
        exit_with_error(f"Error generating changelog: {e}")
|
157
|
+
|
158
|
+
|
159
|
+
@app.callback()
def main() -> None:
    """GitAI - Generate Conventional Commit messages and changelog sections using AI."""
    # Group-level callback: exists only to give the CLI its top-level help text.
|
163
|
+
|
164
|
+
|
165
|
+
# Allow direct execution of this module (e.g. `python -m gitai.cli`).
if __name__ == "__main__":
    app()
|
gitai/commit.py
ADDED
@@ -0,0 +1,338 @@
|
|
1
|
+
"""Commit message generation functionality."""
|
2
|
+
|
3
|
+
import re
|
4
|
+
from pathlib import Path
|
5
|
+
|
6
|
+
from rich.console import Console
|
7
|
+
|
8
|
+
from .config import Config
|
9
|
+
from .constants import (
|
10
|
+
BODY_WRAP_WIDTH,
|
11
|
+
COMMIT_SYSTEM_PROMPT,
|
12
|
+
COMMIT_USER_PROMPT_MERGE,
|
13
|
+
COMMIT_USER_PROMPT_SINGLE,
|
14
|
+
MAX_SUBJECT_LENGTH,
|
15
|
+
TYPE_HINTS_CONTENT,
|
16
|
+
TYPE_HINTS_PATH,
|
17
|
+
)
|
18
|
+
from .diff import StagedDiff
|
19
|
+
from .providers.base import create_provider
|
20
|
+
from .util import extract_scope_from_path, truncate_subject, wrap_text
|
21
|
+
|
22
|
+
# Shared Rich console, used for the status spinner during LLM calls.
console = Console()
|
23
|
+
|
24
|
+
|
25
|
+
class CommitGenerator:
    """Generates commit messages from staged diffs.

    Prefers the configured LLM provider when one is available; silently
    degrades to path/content heuristics when no provider is configured or
    an LLM call fails.
    """

    def __init__(self, config: Config):
        self.config = config
        # None means "heuristics only"; set below when an LLM is usable.
        self.provider = None

        if config.is_llm_available():
            try:
                if config.llm.provider == "openai":
                    self.provider = create_provider(
                        "openai",
                        api_key=config.llm.api_key,
                        base_url=config.llm.base_url,
                        model=config.llm.model,
                        timeout=config.llm.timeout_seconds,
                    )
                elif config.llm.provider == "ollama":
                    self.provider = create_provider(
                        "ollama",
                        base_url=config.llm.base_url,
                        model=config.llm.model,
                        timeout=config.llm.timeout_seconds,
                    )
            except Exception:
                # Provider construction is best-effort; any failure routes
                # generation through the heuristic path instead of crashing.
                self.provider = None

    def generate_commit_message(
        self,
        diff: StagedDiff,
        hook_path: Path | None = None,
        dry_run: bool = False,
        no_body: bool = False,
        style: str | None = None,
    ) -> str:
        """Generate a commit message for *diff* and return it as a string.

        Args:
            diff: Staged changes to describe.
            hook_path: Accepted for CLI symmetry; not read here (the CLI
                itself writes the returned message to the hook file).
            dry_run: Accepted for CLI symmetry; does not alter generation.
            no_body: When True, produce only the subject line.
            style: Optional override of the configured commit style
                ("conventional" or "plain").
        """
        if diff.is_empty():
            return "No staged changes"

        commit_style = style or self.config.commit.style

        if commit_style == "plain":
            return self._generate_plain_message(diff)

        if self.provider:
            try:
                return self._generate_with_llm(diff, no_body)
            except Exception:
                # Any LLM failure (network, timeout, bad response) falls
                # through to the deterministic heuristics below.
                pass

        return self._generate_with_heuristics(diff, no_body)

    def _generate_with_llm(self, diff: StagedDiff, no_body: bool) -> str:
        """Generate a commit message via the LLM provider.

        Single-chunk diffs are sent directly; multi-chunk diffs are
        summarized per chunk (capped at 5 chunks) and the summaries are
        merged in a second call.

        Raises:
            RuntimeError: If no provider is configured.
        """
        if not self.provider:
            raise RuntimeError("No LLM provider available")

        with console.status("[bold yellow]Analyzing staged changes...", spinner="dots"):
            chunks = diff.chunk_by_files()

            if len(chunks) == 1:
                # single chunk so do direct generation
                prompt = COMMIT_USER_PROMPT_SINGLE.format(diff=chunks[0].content)
                if self.config.debug_settings.debug_mode:
                    print("Sending to LLM (single chunk):")
                    print(f"System: {COMMIT_SYSTEM_PROMPT}")
                    print(f"User: {prompt}")
                    print("-" * 50)
                response = self.provider.generate(
                    system=COMMIT_SYSTEM_PROMPT,
                    user=prompt,
                    max_tokens=self.config.llm.max_tokens,
                    temperature=self.config.llm.temperature,
                    timeout=self.config.llm.timeout_seconds,
                )
                if self.config.debug_settings.debug_mode:
                    print(f"LLM Response: {response}")
                    print("-" * 50)
            else:
                # multiple chunks so summarize each first, then merge
                if self.config.debug_settings.debug_mode:
                    print(f"Processing {len(chunks)} chunks:")
                chunk_summaries = []
                for i, chunk in enumerate(chunks):
                    if i >= 5:  # Limit to first 5 chunks to avoid token limits
                        break

                    chunk_prompt = COMMIT_USER_PROMPT_SINGLE.format(diff=chunk.content)
                    if self.config.debug_settings.debug_mode:
                        print(f"Chunk {i + 1} prompt: {chunk_prompt[:200]}...")
                    summary = self.provider.generate(
                        system=COMMIT_SYSTEM_PROMPT,
                        user=chunk_prompt,
                        max_tokens=150,  # Shorter for summaries
                        temperature=self.config.llm.temperature,
                        timeout=self.config.llm.timeout_seconds,
                    )
                    if self.config.debug_settings.debug_mode:
                        print(f"Chunk {i + 1} summary: {summary}")
                    chunk_summaries.append(f"- {summary}")

                merge_prompt = COMMIT_USER_PROMPT_MERGE.format(
                    chunk_summaries="\n".join(chunk_summaries)
                )
                if self.config.debug_settings.debug_mode:
                    print("Sending merge prompt to LLM:")
                    print(f"System: {COMMIT_SYSTEM_PROMPT}")
                    print(f"User: {merge_prompt}")
                    print("-" * 50)
                response = self.provider.generate(
                    system=COMMIT_SYSTEM_PROMPT,
                    user=merge_prompt,
                    max_tokens=self.config.llm.max_tokens,
                    temperature=self.config.llm.temperature,
                    timeout=self.config.llm.timeout_seconds,
                )
                if self.config.debug_settings.debug_mode:
                    print(f"LLM Response: {response}")
                    print("-" * 50)

        return self._clean_and_format_response(response, no_body)

    def _generate_with_heuristics(self, diff: StagedDiff, no_body: bool) -> str:
        """Generate a conventional commit message without an LLM."""
        commit_type = self._infer_commit_type(diff)
        scope = self._infer_scope(diff)
        subject = self._generate_subject(diff, commit_type, scope)

        if no_body or not self.config.commit.include_body:
            return subject

        body = self._generate_body(diff)
        if body:
            return f"{subject}\n\n{body}"

        return subject

    def _describe_change(self, diff: StagedDiff) -> str:
        """One-line change description: '<action> <filename>' for a single
        file, '<action> N files' otherwise."""
        action = self._infer_action_from_diff(diff.raw_diff)
        if len(diff.files) == 1:
            # BUGFIX: previously the filename was computed but an unused
            # literal "(unknown)" was emitted in its place.
            return f"{action} {Path(diff.files[0]).name}"
        return f"{action} {len(diff.files)} files"

    def _generate_plain_message(self, diff: StagedDiff) -> str:
        """Generate a plain (non-conventional) commit message."""
        return self._describe_change(diff)

    def _infer_commit_type(self, diff: StagedDiff) -> str:
        """Infer conventional commit type (feat/fix/docs/...) from the diff."""
        # check file paths first
        for file_path in diff.files:
            for pattern, commit_type in TYPE_HINTS_PATH.items():
                if pattern in file_path:
                    return commit_type

        # check content hints
        content_hints = []
        for line in diff.raw_diff.split("\n"):
            line_lower = line.lower()
            for hint, commit_type in TYPE_HINTS_CONTENT.items():
                if hint in line_lower:
                    content_hints.append(commit_type)

        # return most common hint, default to 'feat'
        if content_hints:
            return max(set(content_hints), key=content_hints.count)

        # default based on diff stats
        if diff.stats["new_files"] > 0:
            return "feat"
        elif diff.stats["deletions"] > diff.stats["additions"]:
            return "fix"
        else:
            return "feat"

    def _infer_scope(self, diff: StagedDiff) -> str | None:
        """Infer a scope from changed file paths, or None when disabled or
        no path yields a scope."""
        if not self.config.commit.scope_detection:
            return None

        scopes = []
        for file_path in diff.files:
            scope = extract_scope_from_path(file_path)
            if scope:
                scopes.append(scope)

        if not scopes:
            return None

        # return most common scope
        return max(set(scopes), key=scopes.count)

    def _generate_subject(
        self, diff: StagedDiff, commit_type: str, scope: str | None
    ) -> str:
        """Build a 'type(scope): description' subject, truncated to the
        configured maximum length."""
        prefix = commit_type
        if scope:
            prefix = f"{commit_type}({scope})"

        # BUGFIX: the single-file branch previously emitted the literal
        # "(unknown)" instead of the filename; _describe_change now handles
        # both the single- and multi-file cases correctly.
        description = self._describe_change(diff)

        subject = f"{prefix}: {description}"
        return truncate_subject(subject, MAX_SUBJECT_LENGTH)

    def _generate_body(self, diff: StagedDiff) -> str:
        """Build a bullet-point body listing per-file (or per-directory)
        changes, plus a stats line for larger diffs."""
        bullets = []

        # add file changes
        if len(diff.files) <= 5:
            for file_path in diff.files:
                action = self._infer_action_from_file(file_path, diff.raw_diff)
                bullets.append(f"- {action} {file_path}")
        else:
            # group by directory for many files
            dirs: dict[str, list[str]] = {}
            for file_path in diff.files:
                dir_name = str(Path(file_path).parent)
                if dir_name not in dirs:
                    dirs[dir_name] = []
                dirs[dir_name].append(file_path)

            for dir_name, files in dirs.items():
                if len(files) == 1:
                    action = self._infer_action_from_file(files[0], diff.raw_diff)
                    bullets.append(f"- {action} {files[0]}")
                else:
                    bullets.append(f"- update {len(files)} files in {dir_name}")

        # add stats if significant
        if diff.stats["additions"] > 10 or diff.stats["deletions"] > 10:
            bullets.append(
                f"- {diff.stats['additions']} additions, {diff.stats['deletions']} deletions"
            )

        return "\n".join(bullets)

    def _infer_action_from_diff(self, diff_content: str) -> str:
        """Infer the dominant action verb (add/update/rename) from raw diff
        text; defaults to 'update'."""
        actions = []
        lines = diff_content.split("\n")

        for line in lines:
            line_lower = line.lower()
            # NOTE(review): "(new file)" is not standard unified-diff output;
            # presumably diff.py injects this marker -- confirm against it.
            if line.startswith("+++ b/") and "(new file)" in line:
                actions.append("add")
            elif "rename" in line_lower:
                actions.append("rename")
            elif line.startswith("+") and not line.startswith("+++"):
                actions.append("add")
            elif line.startswith("-") and not line.startswith("---"):
                actions.append("update")

        if actions:
            return max(set(actions), key=actions.count)

        return "update"

    def _infer_action_from_file(self, file_path: str, diff_content: str) -> str:
        """Infer the action verb for one file; defaults to 'update'."""
        # check if file is new
        if f"+++ b/{file_path}" in diff_content and "(new file)" in diff_content:
            return "add"

        # check for renames
        if "rename" in diff_content and file_path in diff_content:
            return "rename"

        return "update"

    def _clean_and_format_response(self, response: str, no_body: bool) -> str:
        """Normalize an LLM response into subject (+ optional wrapped body)."""
        # remove code fences if present
        response = re.sub(r"```.*?\n?", "", response)
        response = response.strip()

        # split into subject and body
        lines = response.split("\n")
        subject = lines[0].strip()

        # ensure subject follows conventional format
        if ":" not in subject:
            # try to add type if missing
            subject = f"feat: {subject}"

        # truncate subject if too long
        subject = truncate_subject(subject, MAX_SUBJECT_LENGTH)

        if no_body or len(lines) == 1:
            return subject

        # format body
        body_lines = []
        for line in lines[1:]:
            line = line.strip()
            if line:
                # wrap long lines
                if len(line) > BODY_WRAP_WIDTH:
                    line = wrap_text(line, BODY_WRAP_WIDTH)
                body_lines.append(line)

        if body_lines:
            return f"{subject}\n\n" + "\n".join(body_lines)

        return subject
|
gitai/config.py
ADDED
@@ -0,0 +1,120 @@
|
|
1
|
+
"""Configuration management for GitAI."""
|
2
|
+
|
3
|
+
import os
|
4
|
+
from dataclasses import dataclass
|
5
|
+
from pathlib import Path
|
6
|
+
|
7
|
+
from .util import find_git_root, load_toml_config
|
8
|
+
|
9
|
+
|
10
|
+
@dataclass
class LLMConfig:
    """LLM provider configuration."""

    # Which backend to use; the code recognizes "openai" and "ollama".
    provider: str = "openai"
    # Model identifier passed through to the provider.
    model: str = "gpt-4o-mini"
    # Token cap for the final generated response.
    max_tokens: int = 300
    # 0.0 keeps generation deterministic.
    temperature: float = 0.0
    # Per-request timeout handed to the provider.
    timeout_seconds: int = 45
    # Populated from OPENAI_API_KEY when provider == "openai" (Config.load).
    api_key: str | None = None
    # Populated from OLLAMA_BASE_URL when provider == "ollama" (Config.load).
    base_url: str | None = None
|
21
|
+
|
22
|
+
|
23
|
+
@dataclass
class CommitConfig:
    """Commit message generation configuration."""

    # "conventional" or "plain".
    style: str = "conventional"
    # When True, infer a scope from changed file paths.
    scope_detection: bool = True
    # When False, only a subject line is generated.
    include_body: bool = True
    # Footer/trailer toggle -- not referenced in the code visible here.
    include_footers: bool = True
    # Target wrap width for commit body text.
    wrap_width: int = 72
|
32
|
+
|
33
|
+
|
34
|
+
@dataclass
class ChangelogConfig:
    """Changelog generation configuration."""

    # How entries are grouped in the generated section (default: by type).
    grouping: str = "type"
    # Heading format; defaults to the Keep a Changelog convention.
    heading_style: str = "keep-a-changelog"
|
40
|
+
|
41
|
+
|
42
|
+
@dataclass
class DebugConfig:
    """Debug configuration."""

    # When True, prompts and raw LLM responses are printed to stdout.
    debug_mode: bool = False
|
47
|
+
|
48
|
+
|
49
|
+
@dataclass
class Config:
    """Main configuration, assembled from .gitai.toml plus environment
    variables by :meth:`load`."""

    llm: LLMConfig
    commit: CommitConfig
    changelog: ChangelogConfig
    git_root: Path
    debug_settings: DebugConfig

    @classmethod
    def load(cls, git_root: Path | None = None) -> "Config":
        """Load configuration from <git_root>/.gitai.toml and the environment.

        Missing file or keys fall back to the dataclass defaults.
        Credentials and endpoints come from the environment: OPENAI_API_KEY
        for the openai provider, OLLAMA_BASE_URL (default
        http://localhost:11434) for ollama.
        """
        if git_root is None:
            git_root = find_git_root()

        config_path = git_root / ".gitai.toml"
        config_data = load_toml_config(config_path)

        llm_data = config_data.get("llm", {})
        # BUGFIX: resolve the provider once, with its default, and reuse it
        # below. Previously the env-derived fields checked
        # `llm_data.get("provider") == "openai"` without a default, so a
        # config file that omitted `provider` defaulted to openai but never
        # received OPENAI_API_KEY, making the LLM permanently unavailable.
        provider = llm_data.get("provider", "openai")
        llm_config = LLMConfig(
            provider=provider,
            model=llm_data.get("model", "gpt-4o-mini"),
            max_tokens=llm_data.get("max_tokens", 300),
            temperature=llm_data.get("temperature", 0.0),
            timeout_seconds=llm_data.get("timeout_seconds", 45),
            api_key=os.getenv("OPENAI_API_KEY") if provider == "openai" else None,
            base_url=(
                os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
                if provider == "ollama"
                else None
            ),
        )

        commit_data = config_data.get("commit", {})
        commit_config = CommitConfig(
            style=commit_data.get("style", "conventional"),
            scope_detection=commit_data.get("scope_detection", True),
            include_body=commit_data.get("include_body", True),
            include_footers=commit_data.get("include_footers", True),
            wrap_width=commit_data.get("wrap_width", 72),
        )

        changelog_data = config_data.get("changelog", {})
        changelog_config = ChangelogConfig(
            grouping=changelog_data.get("grouping", "type"),
            heading_style=changelog_data.get("heading_style", "keep-a-changelog"),
        )

        debug_settings = config_data.get("debug", {})
        debug_config = DebugConfig(debug_mode=debug_settings.get("debug_mode", False))

        return cls(
            llm=llm_config,
            commit=commit_config,
            changelog=changelog_config,
            debug_settings=debug_config,
            git_root=git_root,
        )

    def is_llm_available(self) -> bool:
        """Return True when the configured provider has what it needs:
        an API key for openai, a base URL for ollama."""
        if self.llm.provider == "openai":
            return self.llm.api_key is not None
        elif self.llm.provider == "ollama":
            # for Ollama, we assume it's available if base_url is set
            return self.llm.base_url is not None
        return False
|