cognify_code-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ai_code_assistant/__init__.py +14 -0
- ai_code_assistant/agent/__init__.py +63 -0
- ai_code_assistant/agent/code_agent.py +461 -0
- ai_code_assistant/agent/code_generator.py +388 -0
- ai_code_assistant/agent/code_reviewer.py +365 -0
- ai_code_assistant/agent/diff_engine.py +308 -0
- ai_code_assistant/agent/file_manager.py +300 -0
- ai_code_assistant/agent/intent_classifier.py +284 -0
- ai_code_assistant/chat/__init__.py +11 -0
- ai_code_assistant/chat/agent_session.py +156 -0
- ai_code_assistant/chat/session.py +165 -0
- ai_code_assistant/cli.py +1571 -0
- ai_code_assistant/config.py +149 -0
- ai_code_assistant/editor/__init__.py +8 -0
- ai_code_assistant/editor/diff_handler.py +270 -0
- ai_code_assistant/editor/file_editor.py +350 -0
- ai_code_assistant/editor/prompts.py +146 -0
- ai_code_assistant/generator/__init__.py +7 -0
- ai_code_assistant/generator/code_gen.py +265 -0
- ai_code_assistant/generator/prompts.py +114 -0
- ai_code_assistant/git/__init__.py +6 -0
- ai_code_assistant/git/commit_generator.py +130 -0
- ai_code_assistant/git/manager.py +203 -0
- ai_code_assistant/llm.py +111 -0
- ai_code_assistant/providers/__init__.py +23 -0
- ai_code_assistant/providers/base.py +124 -0
- ai_code_assistant/providers/cerebras.py +97 -0
- ai_code_assistant/providers/factory.py +148 -0
- ai_code_assistant/providers/google.py +103 -0
- ai_code_assistant/providers/groq.py +111 -0
- ai_code_assistant/providers/ollama.py +86 -0
- ai_code_assistant/providers/openai.py +114 -0
- ai_code_assistant/providers/openrouter.py +130 -0
- ai_code_assistant/py.typed +0 -0
- ai_code_assistant/refactor/__init__.py +20 -0
- ai_code_assistant/refactor/analyzer.py +189 -0
- ai_code_assistant/refactor/change_plan.py +172 -0
- ai_code_assistant/refactor/multi_file_editor.py +346 -0
- ai_code_assistant/refactor/prompts.py +175 -0
- ai_code_assistant/retrieval/__init__.py +19 -0
- ai_code_assistant/retrieval/chunker.py +215 -0
- ai_code_assistant/retrieval/indexer.py +236 -0
- ai_code_assistant/retrieval/search.py +239 -0
- ai_code_assistant/reviewer/__init__.py +7 -0
- ai_code_assistant/reviewer/analyzer.py +278 -0
- ai_code_assistant/reviewer/prompts.py +113 -0
- ai_code_assistant/utils/__init__.py +18 -0
- ai_code_assistant/utils/file_handler.py +155 -0
- ai_code_assistant/utils/formatters.py +259 -0
- cognify_code-0.2.0.dist-info/METADATA +383 -0
- cognify_code-0.2.0.dist-info/RECORD +55 -0
- cognify_code-0.2.0.dist-info/WHEEL +5 -0
- cognify_code-0.2.0.dist-info/entry_points.txt +3 -0
- cognify_code-0.2.0.dist-info/licenses/LICENSE +22 -0
- cognify_code-0.2.0.dist-info/top_level.txt +1 -0
ai_code_assistant/editor/file_editor.py
@@ -0,0 +1,350 @@
"""File editor for AI-powered code modifications."""

import re
import shutil
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Literal, Optional

from ai_code_assistant.config import Config, get_language_by_extension
from ai_code_assistant.llm import LLMManager
from ai_code_assistant.editor.prompts import EDIT_PROMPTS
from ai_code_assistant.editor.diff_handler import DiffHandler, DiffResult


EditMode = Literal["edit", "targeted", "refactor", "fix", "add"]


@dataclass
class EditResult:
    """Result of a file edit operation."""
    filename: str
    original_content: str
    modified_content: str
    instruction: str
    mode: EditMode
    diff: Optional[DiffResult] = None
    backup_path: Optional[str] = None
    applied: bool = False
    error: Optional[str] = None
    raw_response: str = ""

    @property
    def success(self) -> bool:
        return self.error is None and bool(self.modified_content)

    @property
    def has_changes(self) -> bool:
        return self.original_content != self.modified_content

    def to_dict(self) -> dict:
        return {
            "filename": self.filename,
            "instruction": self.instruction,
            "mode": self.mode,
            "success": self.success,
            "has_changes": self.has_changes,
            "applied": self.applied,
            "backup_path": self.backup_path,
            "error": self.error,
            "diff": self.diff.to_dict() if self.diff else None,
        }


class FileEditor:
    """AI-powered file editor."""

    def __init__(self, config: Config, llm_manager: LLMManager):
        """Initialize the file editor.

        Args:
            config: Application configuration
            llm_manager: LLM manager for AI interactions
        """
        self.config = config
        self.llm = llm_manager
        self.diff_handler = DiffHandler()

    def edit_file(
        self,
        file_path: Path,
        instruction: str,
        mode: EditMode = "edit",
        preview: bool = False,
        create_backup: bool = True,
        start_line: Optional[int] = None,
        end_line: Optional[int] = None,
    ) -> EditResult:
        """Edit a file using AI based on natural language instructions.

        Args:
            file_path: Path to the file to edit
            instruction: Natural language edit instruction
            mode: Edit mode (edit, targeted, refactor, fix, add)
            preview: If True, don't apply changes
            create_backup: If True, create backup before editing
            start_line: Start line for targeted edits
            end_line: End line for targeted edits

        Returns:
            EditResult with the edit outcome
        """
        # Validate file exists
        if not file_path.exists():
            return EditResult(
                filename=str(file_path),
                original_content="",
                modified_content="",
                instruction=instruction,
                mode=mode,
                error=f"File not found: {file_path}",
            )

        # Check file size
        file_size_kb = file_path.stat().st_size / 1024
        max_size = self.config.editor.max_file_size_kb if hasattr(self.config, 'editor') else 500
        if file_size_kb > max_size:
            return EditResult(
                filename=str(file_path),
                original_content="",
                modified_content="",
                instruction=instruction,
                mode=mode,
                error=f"File too large: {file_size_kb:.1f}KB (max: {max_size}KB)",
            )

        # Read original content
        try:
            original_content = file_path.read_text(encoding="utf-8")
        except Exception as e:
            return EditResult(
                filename=str(file_path),
                original_content="",
                modified_content="",
                instruction=instruction,
                mode=mode,
                error=f"Could not read file: {e}",
            )

        # Detect language
        language = get_language_by_extension(self.config, file_path) or "text"

        # Get modified content from LLM
        result = self._get_edited_content(
            original_content=original_content,
            instruction=instruction,
            filename=str(file_path.name),
            language=language,
            mode=mode,
            start_line=start_line,
            end_line=end_line,
        )

        if result.error:
            return result

        # Generate diff
        diff = self.diff_handler.generate_diff(
            original=original_content,
            modified=result.modified_content,
            filename=str(file_path.name),
        )
        result.diff = diff

        # Apply changes if not preview mode
        if not preview and result.has_changes:
            backup_path = None

            # Create backup if requested
            if create_backup:
                backup_path = self._create_backup(file_path)
                result.backup_path = str(backup_path) if backup_path else None

            # Write modified content
            try:
                file_path.write_text(result.modified_content, encoding="utf-8")
                result.applied = True
            except Exception as e:
                result.error = f"Could not write file: {e}"
                result.applied = False

        return result

    def _get_edited_content(
        self,
        original_content: str,
        instruction: str,
        filename: str,
        language: str,
        mode: EditMode,
        start_line: Optional[int] = None,
        end_line: Optional[int] = None,
    ) -> EditResult:
        """Get edited content from LLM.

        Args:
            original_content: Original file content
            instruction: Edit instruction
            filename: Name of the file
            language: Programming language
            mode: Edit mode
            start_line: Start line for targeted edits
            end_line: End line for targeted edits

        Returns:
            EditResult with modified content
        """
        # Select appropriate prompt
        if mode == "targeted" and start_line and end_line:
            prompt = EDIT_PROMPTS["targeted"]
            kwargs = {
                "code": original_content,
                "instruction": instruction,
                "filename": filename,
                "language": language,
                "start_line": start_line,
                "end_line": end_line,
            }
        else:
            prompt = EDIT_PROMPTS.get(mode, EDIT_PROMPTS["edit"])
            kwargs = {
                "code": original_content,
                "instruction": instruction,
                "filename": filename,
                "language": language,
            }

        try:
            response = self.llm.invoke_with_template(prompt, **kwargs)
            modified_content = self._extract_code(response, language)

            if not modified_content:
                return EditResult(
                    filename=filename,
                    original_content=original_content,
                    modified_content="",
                    instruction=instruction,
                    mode=mode,
                    error="Could not extract code from LLM response",
                    raw_response=response,
                )

            return EditResult(
                filename=filename,
                original_content=original_content,
                modified_content=modified_content,
                instruction=instruction,
                mode=mode,
                raw_response=response,
            )

        except Exception as e:
            return EditResult(
                filename=filename,
                original_content=original_content,
                modified_content="",
                instruction=instruction,
                mode=mode,
                error=f"LLM error: {str(e)}",
            )

    def _extract_code(self, response: str, language: str) -> str:
        """Extract code from LLM response.

        Args:
            response: Raw LLM response
            language: Expected language

        Returns:
            Extracted code or empty string
        """
        # Try to find code block with language tag
        pattern = rf"```{language}\s*\n(.*?)```"
        match = re.search(pattern, response, re.DOTALL | re.IGNORECASE)
        if match:
            return match.group(1).strip()

        # Try generic code block
        pattern = r"```\w*\s*\n(.*?)```"
        match = re.search(pattern, response, re.DOTALL)
        if match:
            return match.group(1).strip()

        # Try to find code without explicit blocks (fallback)
        # If response looks like code, return it
        lines = response.strip().split('\n')
        if len(lines) > 1 and not response.startswith('I ') and not response.startswith('Here'):
            return response.strip()

        return ""

    def _create_backup(self, file_path: Path) -> Optional[Path]:
        """Create a backup of the file.

        Args:
            file_path: Path to the file to backup

        Returns:
            Path to backup file or None if failed
        """
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_name = f"{file_path.stem}.{timestamp}.bak{file_path.suffix}"
        backup_path = file_path.parent / backup_name

        try:
            shutil.copy2(file_path, backup_path)
            return backup_path
        except Exception:
            return None

    def edit_code(
        self,
        code: str,
        instruction: str,
        language: str = "python",
        mode: EditMode = "edit",
    ) -> EditResult:
        """Edit code string directly without file operations.

        Args:
            code: Code to edit
            instruction: Edit instruction
            language: Programming language
            mode: Edit mode

        Returns:
            EditResult with modified code
        """
        result = self._get_edited_content(
            original_content=code,
            instruction=instruction,
            filename="code_snippet",
            language=language,
            mode=mode,
        )

        if result.success:
            result.diff = self.diff_handler.generate_diff(
                original=code,
                modified=result.modified_content,
                filename="code_snippet",
            )

        return result

    def restore_backup(self, backup_path: Path, original_path: Path) -> bool:
        """Restore a file from backup.

        Args:
            backup_path: Path to backup file
            original_path: Path to restore to

        Returns:
            True if successful, False otherwise
        """
        try:
            shutil.copy2(backup_path, original_path)
            return True
        except Exception:
            return False
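For orientation, a minimal usage sketch of the FileEditor added above, run in preview mode so nothing is written to disk. The Config() and LLMManager(config) constructor calls are assumptions; their real factories live in config.py and llm.py, which are not part of this hunk.

# Hypothetical driver script; Config and LLMManager construction is assumed,
# everything else follows the code in this file.
from pathlib import Path

from ai_code_assistant.config import Config
from ai_code_assistant.llm import LLMManager
from ai_code_assistant.editor.file_editor import FileEditor

config = Config()                                  # assumed default construction
editor = FileEditor(config, LLMManager(config))    # assumed LLMManager signature

# Preview a bug fix: the LLM proposes a full rewrite and a diff is generated,
# but preview=True keeps the file untouched and no backup is created.
result = editor.edit_file(
    Path("example.py"),
    instruction="Handle an empty input list without raising IndexError",
    mode="fix",
    preview=True,
)

if result.success and result.has_changes and result.diff is not None:
    print(result.diff.to_dict())   # DiffResult exposes to_dict(), per EditResult.to_dict()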
ai_code_assistant/editor/prompts.py
@@ -0,0 +1,146 @@
"""Prompt templates for code editing."""

from langchain_core.prompts import ChatPromptTemplate

# System prompt for code editing
EDIT_SYSTEM_PROMPT = """You are an expert code editor. Your task is to modify existing code based on user instructions.

When editing code:
1. Make ONLY the changes requested - do not refactor or modify unrelated code
2. Preserve the original code style, indentation, and formatting
3. Maintain all existing functionality unless explicitly asked to change it
4. Keep comments and docstrings unless they need updating for the changes
5. Ensure the edited code is syntactically correct

IMPORTANT: Return the COMPLETE modified file content, not just the changed parts.
Wrap the code in markdown code blocks with the appropriate language tag."""

# Main edit prompt template
EDIT_PROMPT = ChatPromptTemplate.from_messages([
    ("system", EDIT_SYSTEM_PROMPT),
    ("human", """Edit the following {language} code according to these instructions:

**Edit Instructions:** {instruction}

**Original Code:**
```{language}
{code}
```

**File:** {filename}

Apply the requested changes and return the COMPLETE modified file.
Wrap your response in ```{language} code blocks.""")
])

# Targeted edit prompt (for specific line ranges)
TARGETED_EDIT_PROMPT = ChatPromptTemplate.from_messages([
    ("system", EDIT_SYSTEM_PROMPT),
    ("human", """Edit the following {language} code according to these instructions:

**Edit Instructions:** {instruction}

**Target Lines:** {start_line} to {end_line}

**Original Code:**
```{language}
{code}
```

**File:** {filename}

Focus your changes on lines {start_line}-{end_line}, but return the COMPLETE modified file.
Wrap your response in ```{language} code blocks.""")
])

# Refactor prompt (for larger structural changes)
REFACTOR_PROMPT = ChatPromptTemplate.from_messages([
    ("system", """You are an expert code refactoring assistant. Your task is to improve code structure while maintaining functionality.

When refactoring:
1. Improve code organization and readability
2. Apply design patterns where appropriate
3. Reduce code duplication
4. Improve naming conventions
5. Add or update type hints and docstrings
6. Ensure all tests would still pass

Return the COMPLETE refactored file wrapped in markdown code blocks."""),
    ("human", """Refactor the following {language} code:

**Refactoring Goal:** {instruction}

**Original Code:**
```{language}
{code}
```

**File:** {filename}

Apply the refactoring and return the COMPLETE modified file.
Wrap your response in ```{language} code blocks.""")
])

# Fix prompt (for bug fixes)
FIX_PROMPT = ChatPromptTemplate.from_messages([
    ("system", """You are an expert debugger. Your task is to fix bugs in code while minimizing changes.

When fixing bugs:
1. Identify the root cause of the issue
2. Make the minimal change necessary to fix the bug
3. Do not introduce new features or refactor unrelated code
4. Add comments explaining the fix if it's not obvious
5. Ensure the fix doesn't break other functionality

Return the COMPLETE fixed file wrapped in markdown code blocks."""),
    ("human", """Fix the following issue in this {language} code:

**Issue Description:** {instruction}

**Original Code:**
```{language}
{code}
```

**File:** {filename}

Fix the issue and return the COMPLETE modified file.
Wrap your response in ```{language} code blocks.""")
])

# Add feature prompt
ADD_FEATURE_PROMPT = ChatPromptTemplate.from_messages([
    ("system", """You are an expert software developer. Your task is to add new features to existing code.

When adding features:
1. Follow the existing code style and patterns
2. Add appropriate error handling
3. Include type hints and docstrings
4. Integrate seamlessly with existing code
5. Do not modify unrelated functionality

Return the COMPLETE file with the new feature wrapped in markdown code blocks."""),
    ("human", """Add the following feature to this {language} code:

**Feature Description:** {instruction}

**Original Code:**
```{language}
{code}
```

**File:** {filename}

Add the feature and return the COMPLETE modified file.
Wrap your response in ```{language} code blocks.""")
])

# Collect all prompts
EDIT_PROMPTS = {
    "edit": EDIT_PROMPT,
    "targeted": TARGETED_EDIT_PROMPT,
    "refactor": REFACTOR_PROMPT,
    "fix": FIX_PROMPT,
    "add": ADD_FEATURE_PROMPT,
}
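As a quick sanity check, these templates can also be rendered on their own with LangChain's standard ChatPromptTemplate.format_messages API, outside of LLMManager.invoke_with_template. The instruction, code, and filename values below are placeholders, not anything shipped in the package.

# Illustrative rendering of the "fix" template with dummy values.
from ai_code_assistant.editor.prompts import EDIT_PROMPTS

messages = EDIT_PROMPTS["fix"].format_messages(
    language="python",
    instruction="Guard against division by zero",
    code="def ratio(a, b):\n    return a / b\n",
    filename="ratio.py",
)
for message in messages:
    # Each message carries a role ("system"/"human") and the filled-in content.
    print(f"[{message.type}] {message.content[:80]}")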
ai_code_assistant/generator/__init__.py
@@ -0,0 +1,7 @@
"""Code generation module for AI Code Assistant."""

from ai_code_assistant.generator.code_gen import CodeGenerator, GenerationResult
from ai_code_assistant.generator.prompts import GENERATION_PROMPTS

__all__ = ["CodeGenerator", "GenerationResult", "GENERATION_PROMPTS"]