cognify_code-0.2.0-py3-none-any.whl
This diff shows the contents of a publicly available package version as released to a supported registry. It is provided for informational purposes only and reflects the package as it appears in the public registry.
- ai_code_assistant/__init__.py +14 -0
- ai_code_assistant/agent/__init__.py +63 -0
- ai_code_assistant/agent/code_agent.py +461 -0
- ai_code_assistant/agent/code_generator.py +388 -0
- ai_code_assistant/agent/code_reviewer.py +365 -0
- ai_code_assistant/agent/diff_engine.py +308 -0
- ai_code_assistant/agent/file_manager.py +300 -0
- ai_code_assistant/agent/intent_classifier.py +284 -0
- ai_code_assistant/chat/__init__.py +11 -0
- ai_code_assistant/chat/agent_session.py +156 -0
- ai_code_assistant/chat/session.py +165 -0
- ai_code_assistant/cli.py +1571 -0
- ai_code_assistant/config.py +149 -0
- ai_code_assistant/editor/__init__.py +8 -0
- ai_code_assistant/editor/diff_handler.py +270 -0
- ai_code_assistant/editor/file_editor.py +350 -0
- ai_code_assistant/editor/prompts.py +146 -0
- ai_code_assistant/generator/__init__.py +7 -0
- ai_code_assistant/generator/code_gen.py +265 -0
- ai_code_assistant/generator/prompts.py +114 -0
- ai_code_assistant/git/__init__.py +6 -0
- ai_code_assistant/git/commit_generator.py +130 -0
- ai_code_assistant/git/manager.py +203 -0
- ai_code_assistant/llm.py +111 -0
- ai_code_assistant/providers/__init__.py +23 -0
- ai_code_assistant/providers/base.py +124 -0
- ai_code_assistant/providers/cerebras.py +97 -0
- ai_code_assistant/providers/factory.py +148 -0
- ai_code_assistant/providers/google.py +103 -0
- ai_code_assistant/providers/groq.py +111 -0
- ai_code_assistant/providers/ollama.py +86 -0
- ai_code_assistant/providers/openai.py +114 -0
- ai_code_assistant/providers/openrouter.py +130 -0
- ai_code_assistant/py.typed +0 -0
- ai_code_assistant/refactor/__init__.py +20 -0
- ai_code_assistant/refactor/analyzer.py +189 -0
- ai_code_assistant/refactor/change_plan.py +172 -0
- ai_code_assistant/refactor/multi_file_editor.py +346 -0
- ai_code_assistant/refactor/prompts.py +175 -0
- ai_code_assistant/retrieval/__init__.py +19 -0
- ai_code_assistant/retrieval/chunker.py +215 -0
- ai_code_assistant/retrieval/indexer.py +236 -0
- ai_code_assistant/retrieval/search.py +239 -0
- ai_code_assistant/reviewer/__init__.py +7 -0
- ai_code_assistant/reviewer/analyzer.py +278 -0
- ai_code_assistant/reviewer/prompts.py +113 -0
- ai_code_assistant/utils/__init__.py +18 -0
- ai_code_assistant/utils/file_handler.py +155 -0
- ai_code_assistant/utils/formatters.py +259 -0
- cognify_code-0.2.0.dist-info/METADATA +383 -0
- cognify_code-0.2.0.dist-info/RECORD +55 -0
- cognify_code-0.2.0.dist-info/WHEEL +5 -0
- cognify_code-0.2.0.dist-info/entry_points.txt +3 -0
- cognify_code-0.2.0.dist-info/licenses/LICENSE +22 -0
- cognify_code-0.2.0.dist-info/top_level.txt +1 -0
ai_code_assistant/generator/code_gen.py
@@ -0,0 +1,265 @@
"""Code generator for creating code from specifications."""

import re
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Literal, Optional

from ai_code_assistant.config import Config
from ai_code_assistant.llm import LLMManager
from ai_code_assistant.generator.prompts import GENERATION_PROMPTS


GenerationMode = Literal["function", "class", "script", "test", "generic"]


@dataclass
class GenerationResult:
    """Result of code generation."""
    code: str
    language: str
    mode: GenerationMode
    description: str
    raw_response: str = ""
    error: Optional[str] = None

    @property
    def success(self) -> bool:
        return self.error is None and bool(self.code.strip())


class CodeGenerator:
    """Generates code from natural language descriptions."""

    def __init__(self, config: Config, llm_manager: LLMManager):
        self.config = config
        self.llm = llm_manager

    def generate_function(
        self,
        description: str,
        name: str,
        language: str = "python",
        parameters: str = "",
        return_type: str = "None",
    ) -> GenerationResult:
        """Generate a function from specification."""
        prompt = GENERATION_PROMPTS["function"]

        try:
            response = self.llm.invoke_with_template(
                prompt,
                language=language,
                description=description,
                name=name,
                parameters=parameters or "None specified",
                return_type=return_type,
                include_type_hints=self.config.generation.include_type_hints,
                include_docstrings=self.config.generation.include_docstrings,
            )
            code = self._extract_code(response, language)
            return GenerationResult(
                code=code,
                language=language,
                mode="function",
                description=description,
                raw_response=response,
            )
        except Exception as e:
            return GenerationResult(
                code="",
                language=language,
                mode="function",
                description=description,
                error=str(e),
            )

    def generate_class(
        self,
        description: str,
        name: str,
        language: str = "python",
        attributes: str = "",
        methods: str = "",
    ) -> GenerationResult:
        """Generate a class from specification."""
        prompt = GENERATION_PROMPTS["class"]

        try:
            response = self.llm.invoke_with_template(
                prompt,
                language=language,
                description=description,
                name=name,
                attributes=attributes or "None specified",
                methods=methods or "None specified",
                include_type_hints=self.config.generation.include_type_hints,
                include_docstrings=self.config.generation.include_docstrings,
            )
            code = self._extract_code(response, language)
            return GenerationResult(
                code=code,
                language=language,
                mode="class",
                description=description,
                raw_response=response,
            )
        except Exception as e:
            return GenerationResult(
                code="",
                language=language,
                mode="class",
                description=description,
                error=str(e),
            )

    def generate_script(
        self,
        description: str,
        requirements: List[str],
        language: str = "python",
    ) -> GenerationResult:
        """Generate a complete script or module."""
        prompt = GENERATION_PROMPTS["script"]

        try:
            response = self.llm.invoke_with_template(
                prompt,
                language=language,
                description=description,
                requirements="\n".join(f"- {r}" for r in requirements),
                include_type_hints=self.config.generation.include_type_hints,
                include_docstrings=self.config.generation.include_docstrings,
            )
            code = self._extract_code(response, language)
            return GenerationResult(
                code=code,
                language=language,
                mode="script",
                description=description,
                raw_response=response,
            )
        except Exception as e:
            return GenerationResult(
                code="",
                language=language,
                mode="script",
                description=description,
                error=str(e),
            )

    def generate_tests(
        self,
        source_code: str,
        language: str = "python",
        test_framework: str = "pytest",
    ) -> GenerationResult:
        """Generate tests for existing code."""
        prompt = GENERATION_PROMPTS["test"]

        try:
            response = self.llm.invoke_with_template(
                prompt,
                language=language,
                source_code=source_code,
                test_framework=test_framework,
            )
            code = self._extract_code(response, language)
            return GenerationResult(
                code=code,
                language=language,
                mode="test",
                description=f"Tests for provided {language} code",
                raw_response=response,
            )
        except Exception as e:
            return GenerationResult(
                code="",
                language=language,
                mode="test",
                description="Test generation failed",
                error=str(e),
            )

    def generate(
        self,
        description: str,
        language: str = "python",
    ) -> GenerationResult:
        """Generate code from a generic description."""
        prompt = GENERATION_PROMPTS["generic"]

        try:
            response = self.llm.invoke_with_template(
                prompt,
                language=language,
                description=description,
                include_type_hints=self.config.generation.include_type_hints,
                include_docstrings=self.config.generation.include_docstrings,
            )
            code = self._extract_code(response, language)
            return GenerationResult(
                code=code,
                language=language,
                mode="generic",
                description=description,
                raw_response=response,
            )
        except Exception as e:
            return GenerationResult(
                code="",
                language=language,
                mode="generic",
                description=description,
                error=str(e),
            )

    def _extract_code(self, response: str, language: str) -> str:
        """Extract code block from LLM response."""
        # Try to find a language-specific code block first
        pattern = rf"```{language}\s*\n(.*?)```"
        match = re.search(pattern, response, re.DOTALL | re.IGNORECASE)
        if match:
            return match.group(1).strip()

        # Fall back to a bare ``` block
        pattern = r"```\s*\n(.*?)```"
        match = re.search(pattern, response, re.DOTALL)
        if match:
            return match.group(1).strip()

        # Fall back to a block tagged with any language
        pattern = r"```\w*\s*\n(.*?)```"
        match = re.search(pattern, response, re.DOTALL)
        if match:
            return match.group(1).strip()

        # No fences: keep everything from the first line that looks like code
        lines = response.strip().split('\n')
        code_lines = []
        in_code = False

        for line in lines:
            # Detect the start of code
            if not in_code:
                if line.strip().startswith(('def ', 'class ', 'import ', 'from ', '#!',
                                            'function ', 'const ', 'let ', 'var ')):
                    in_code = True

            if in_code:
                code_lines.append(line)

        if code_lines:
            return '\n'.join(code_lines).strip()

        # Return the raw response if no code block was found
        return response.strip()

    def save_to_file(self, result: GenerationResult, output_path: Path) -> bool:
        """Save generated code to a file."""
        try:
            output_path.parent.mkdir(parents=True, exist_ok=True)
            output_path.write_text(result.code)
            return True
        except Exception:
            return False
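CodeGenerator keeps its dependencies narrow: the only calls it makes are invoke_with_template() on the LLM manager and the two config.generation flags. A minimal smoke-test sketch follows; StubLLM and the SimpleNamespace config are hypothetical stand-ins for LLMManager and Config (defined in llm.py and config.py, outside this excerpt), so this exercises generate_function() and _extract_code() without a live model.

from types import SimpleNamespace

from ai_code_assistant.generator.code_gen import CodeGenerator


class StubLLM:
    """Stand-in for LLMManager; CodeGenerator only calls invoke_with_template()."""

    def invoke_with_template(self, prompt, **kwargs) -> str:
        # Canned response in the fenced-markdown shape the prompts ask for.
        return "```python\ndef greet(name: str) -> str:\n    return f'Hello, {name}!'\n```"


# CodeGenerator only reads config.generation.include_type_hints and
# config.generation.include_docstrings, so two flags suffice here.
config = SimpleNamespace(
    generation=SimpleNamespace(include_type_hints=True, include_docstrings=True)
)

generator = CodeGenerator(config, StubLLM())
result = generator.generate_function(
    "Greet a user by name", name="greet", parameters="name: str", return_type="str"
)
assert result.success          # error is None and code is non-empty
print(result.code)             # the def alone; _extract_code() strips the fences

Swapping StubLLM for a real LLMManager is the only change needed to run against a model.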
ai_code_assistant/generator/prompts.py
@@ -0,0 +1,114 @@
"""Prompt templates for code generation."""

from langchain_core.prompts import ChatPromptTemplate

# System prompt for code generation
GENERATION_SYSTEM_PROMPT = """You are an expert software developer who writes clean, efficient, and well-documented code.

When generating code:
1. Follow best practices and design patterns for the language
2. Include comprehensive docstrings and comments
3. Add type hints where applicable
4. Handle edge cases and errors appropriately
5. Write code that is readable and maintainable

Output ONLY the code, wrapped in appropriate markdown code blocks.
Do not include explanations unless specifically requested."""

# Function generation prompt
FUNCTION_PROMPT = ChatPromptTemplate.from_messages([
    ("system", GENERATION_SYSTEM_PROMPT),
    ("human", """Generate a {language} function with the following specification:

**Description:** {description}
**Function name:** {name}
**Parameters:** {parameters}
**Return type:** {return_type}
**Additional requirements:**
- Include type hints: {include_type_hints}
- Include docstring: {include_docstrings}

Generate only the function code, no usage examples.""")
])

# Class generation prompt
CLASS_PROMPT = ChatPromptTemplate.from_messages([
    ("system", GENERATION_SYSTEM_PROMPT),
    ("human", """Generate a {language} class with the following specification:

**Description:** {description}
**Class name:** {name}
**Attributes:** {attributes}
**Methods:** {methods}
**Additional requirements:**
- Include type hints: {include_type_hints}
- Include docstrings: {include_docstrings}
- Follow {language} best practices and conventions

Generate the complete class implementation.""")
])

# Script/module generation prompt
SCRIPT_PROMPT = ChatPromptTemplate.from_messages([
    ("system", GENERATION_SYSTEM_PROMPT),
    ("human", """Generate a complete {language} script/module:

**Description:** {description}
**Features/functionality required:**
{requirements}

**Additional requirements:**
- Include proper imports
- Include type hints: {include_type_hints}
- Include docstrings: {include_docstrings}
- Include a main entry point if applicable
- Handle errors appropriately

Generate the complete script.""")
])

# Test file generation prompt
TEST_PROMPT = ChatPromptTemplate.from_messages([
    ("system", """You are an expert at writing comprehensive test suites.
Write tests that cover happy paths, edge cases, and error conditions.
Use appropriate testing frameworks and assertions."""),
    ("human", """Generate {language} tests for the following code:

```{language}
{source_code}
```

**Test framework:** {test_framework}
**Coverage requirements:**
- Test all public functions/methods
- Include edge case tests
- Include error handling tests
- Use descriptive test names

Generate a complete test file.""")
])

# Generic code generation prompt
GENERIC_PROMPT = ChatPromptTemplate.from_messages([
    ("system", GENERATION_SYSTEM_PROMPT),
    ("human", """Generate {language} code for the following request:

{description}

Requirements:
- Include type hints: {include_type_hints}
- Include docstrings: {include_docstrings}
- Follow best practices

Generate clean, production-ready code.""")
])

# Collect all prompts
GENERATION_PROMPTS = {
    "function": FUNCTION_PROMPT,
    "class": CLASS_PROMPT,
    "script": SCRIPT_PROMPT,
    "test": TEST_PROMPT,
    "generic": GENERIC_PROMPT,
}
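Because these are ordinary langchain-core ChatPromptTemplate objects, they can be rendered without a model to inspect exactly what the LLM will receive; format_messages() is the standard langchain-core API for this. The argument values below are illustrative only.

from ai_code_assistant.generator.prompts import FUNCTION_PROMPT

# Fill every variable FUNCTION_PROMPT declares and render the messages.
messages = FUNCTION_PROMPT.format_messages(
    language="python",
    description="Return the n-th Fibonacci number",
    name="fibonacci",
    parameters="n: int",
    return_type="int",
    include_type_hints=True,
    include_docstrings=True,
)
for message in messages:       # one system message, one human message
    print(f"--- {message.type} ---")
    print(message.content)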
ai_code_assistant/git/commit_generator.py
@@ -0,0 +1,130 @@
"""AI-powered commit message generator."""

from typing import Optional

from ai_code_assistant.git.manager import GitManager, GitDiff, GitStatus


COMMIT_MESSAGE_PROMPT = """Analyze the following git diff and generate a concise, descriptive commit message.

Follow these conventions:
1. Start with a type prefix: feat, fix, docs, style, refactor, test, chore
2. Use imperative mood ("Add feature" not "Added feature")
3. Keep the first line under 72 characters
4. If needed, add a blank line and bullet points for details

Git Status:
- Branch: {branch}
- Files changed: {files_changed}
- Insertions: {insertions}
- Deletions: {deletions}

Changed files:
{changed_files}

Diff (truncated to 3000 chars):
```
{diff}
```

Generate ONLY the commit message, nothing else. No explanations, no markdown formatting around it.
"""


class CommitMessageGenerator:
    """Generates commit messages using AI."""

    def __init__(self, llm_manager):
        self.llm = llm_manager

    def generate(self, git_manager: GitManager,
                 staged_only: bool = True,
                 style: str = "conventional") -> str:
        """Generate a commit message based on the current changes.

        Args:
            git_manager: GitManager instance
            staged_only: If True, only consider staged changes
            style: Commit message style (conventional, simple)

        Returns:
            Generated commit message
        """
        status = git_manager.get_status()
        diff = git_manager.get_diff(staged=staged_only)

        if not diff.diff_text and not status.untracked:
            return ""

        # Build list of changed files
        if staged_only:
            changed_files = status.staged
        else:
            changed_files = status.staged + status.modified + status.untracked

        changed_files_str = "\n".join(f"  - {f}" for f in changed_files[:20])
        if len(changed_files) > 20:
            changed_files_str += f"\n  ... and {len(changed_files) - 20} more files"

        # Truncate diff to avoid token limits
        diff_text = diff.diff_text[:3000]
        if len(diff.diff_text) > 3000:
            diff_text += "\n... (truncated)"

        prompt = COMMIT_MESSAGE_PROMPT.format(
            branch=status.branch,
            files_changed=diff.files_changed or len(changed_files),
            insertions=diff.insertions,
            deletions=diff.deletions,
            changed_files=changed_files_str,
            diff=diff_text,
        )

        # Generate message
        response = self.llm.invoke(prompt)

        # Clean up response
        message = response.strip()
        # Remove any markdown code blocks if present
        if message.startswith("```"):
            lines = message.split("\n")
            message = "\n".join(lines[1:-1] if lines[-1] == "```" else lines[1:])

        return message.strip()

    def generate_from_description(self, description: str,
                                  git_manager: Optional[GitManager] = None) -> str:
        """Generate a commit message from a description.

        Args:
            description: User's description of changes
            git_manager: Optional GitManager for context

        Returns:
            Generated commit message
        """
        context = ""
        if git_manager:
            status = git_manager.get_status()
            diff = git_manager.get_diff(staged=True)
            context = f"""
Context:
- Branch: {status.branch}
- Files changed: {diff.files_changed}
- Staged files: {', '.join(status.staged[:5])}
"""

        prompt = f"""Convert this description into a proper git commit message.

Follow conventional commits format:
- Start with type: feat, fix, docs, style, refactor, test, chore
- Use imperative mood
- Keep first line under 72 characters
{context}
Description: {description}

Generate ONLY the commit message, nothing else:"""

        response = self.llm.invoke(prompt)
        return response.strip()
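As with CodeGenerator, the LLM dependency is duck-typed: anything with an invoke(prompt) -> str method works. The sketch below uses a hypothetical stub in place of LLMManager and calls generate_from_description(), which needs no GitManager at all.

from ai_code_assistant.git.commit_generator import CommitMessageGenerator


class StubLLM:
    """Stand-in for LLMManager; the generator only calls invoke(prompt) -> str."""

    def invoke(self, prompt: str) -> str:
        return "feat: add AI-powered commit message generation"


generator = CommitMessageGenerator(StubLLM())

# Without a GitManager, the context block is simply omitted from the prompt.
message = generator.generate_from_description(
    "added a module that writes commit messages from staged diffs"
)
print(message)   # -> feat: add AI-powered commit message generation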