codegraph-cli 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codegraph_cli/__init__.py +4 -0
- codegraph_cli/agents.py +191 -0
- codegraph_cli/bug_detector.py +386 -0
- codegraph_cli/chat_agent.py +352 -0
- codegraph_cli/chat_session.py +220 -0
- codegraph_cli/cli.py +330 -0
- codegraph_cli/cli_chat.py +367 -0
- codegraph_cli/cli_diagnose.py +133 -0
- codegraph_cli/cli_refactor.py +230 -0
- codegraph_cli/cli_setup.py +470 -0
- codegraph_cli/cli_test.py +177 -0
- codegraph_cli/cli_v2.py +267 -0
- codegraph_cli/codegen_agent.py +265 -0
- codegraph_cli/config.py +31 -0
- codegraph_cli/config_manager.py +341 -0
- codegraph_cli/context_manager.py +500 -0
- codegraph_cli/crew_agents.py +123 -0
- codegraph_cli/crew_chat.py +159 -0
- codegraph_cli/crew_tools.py +497 -0
- codegraph_cli/diff_engine.py +265 -0
- codegraph_cli/embeddings.py +241 -0
- codegraph_cli/graph_export.py +144 -0
- codegraph_cli/llm.py +642 -0
- codegraph_cli/models.py +47 -0
- codegraph_cli/models_v2.py +185 -0
- codegraph_cli/orchestrator.py +49 -0
- codegraph_cli/parser.py +800 -0
- codegraph_cli/performance_analyzer.py +223 -0
- codegraph_cli/project_context.py +230 -0
- codegraph_cli/rag.py +200 -0
- codegraph_cli/refactor_agent.py +452 -0
- codegraph_cli/security_scanner.py +366 -0
- codegraph_cli/storage.py +390 -0
- codegraph_cli/templates/graph_interactive.html +257 -0
- codegraph_cli/testgen_agent.py +316 -0
- codegraph_cli/validation_engine.py +285 -0
- codegraph_cli/vector_store.py +293 -0
- codegraph_cli-2.0.0.dist-info/METADATA +318 -0
- codegraph_cli-2.0.0.dist-info/RECORD +43 -0
- codegraph_cli-2.0.0.dist-info/WHEEL +5 -0
- codegraph_cli-2.0.0.dist-info/entry_points.txt +2 -0
- codegraph_cli-2.0.0.dist-info/licenses/LICENSE +21 -0
- codegraph_cli-2.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
"""CrewAI-based chat agent with multi-agent collaboration + rollback."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
from typing import TYPE_CHECKING, Dict, List
|
|
8
|
+
|
|
9
|
+
from datetime import datetime
|
|
10
|
+
|
|
11
|
+
from crewai import Agent, Crew, Task
|
|
12
|
+
|
|
13
|
+
from .crew_agents import (
|
|
14
|
+
create_code_analysis_agent,
|
|
15
|
+
create_code_gen_agent,
|
|
16
|
+
create_coordinator_agent,
|
|
17
|
+
create_file_ops_agent,
|
|
18
|
+
)
|
|
19
|
+
from .crew_tools import create_tools, list_backups, rollback_file
|
|
20
|
+
from .llm import LocalLLM, create_crewai_llm
|
|
21
|
+
|
|
22
|
+
if TYPE_CHECKING:
|
|
23
|
+
from .project_context import ProjectContext
|
|
24
|
+
from .rag import RAGRetriever
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CrewChatAgent:
    """CrewAI-based chat agent with multi-agent collaboration and rollback.

    Wires a coordinator agent plus specialist agents (file ops, code gen,
    code analysis) over the project's tools, and exposes rollback helpers
    backed by the crew_tools backup store.
    """

    def __init__(
        self,
        context: ProjectContext,
        llm: LocalLLM,
        rag_retriever: RAGRetriever,
    ):
        """Build the agent crew for *context* using *llm* and *rag_retriever*."""
        self.context = context
        self.llm = llm
        self.rag_retriever = rag_retriever

        # ── Silence CrewAI / LiteLLM noise ────────────────
        for logger_name in (
            "crewai", "crewai.agents", "crewai.crews",
            "crewai.tasks", "litellm", "httpx",
        ):
            logging.getLogger(logger_name).setLevel(logging.CRITICAL)
        os.environ.setdefault("CREWAI_TELEMETRY_OPT_OUT", "true")

        # ── Provider env vars for LiteLLM compatibility ───
        if llm.api_key:
            provider = (llm.provider or "").lower()
            if provider == "openrouter" or (llm.endpoint and "openrouter.ai" in llm.endpoint):
                os.environ["OPENROUTER_API_KEY"] = llm.api_key
                if llm.endpoint:
                    os.environ["OPENAI_API_BASE"] = llm.endpoint
            elif provider == "gemini":
                os.environ["GEMINI_API_KEY"] = llm.api_key
            elif provider == "anthropic":
                os.environ["ANTHROPIC_API_KEY"] = llm.api_key
            elif provider == "openai":
                os.environ["OPENAI_API_KEY"] = llm.api_key
            elif provider == "groq":
                os.environ["GROQ_API_KEY"] = llm.api_key

        # Suppress warnings
        import warnings
        warnings.filterwarnings("ignore", module="crewai")

        import contextlib
        # FIX: keep a handle on the devnull file so it is not garbage-collected
        # while stderr is still redirected into it (the old code leaked the fd
        # by passing an anonymous open() into redirect_stderr).
        self._devnull = open(os.devnull, "w")
        self._stderr_suppress = contextlib.redirect_stderr(self._devnull)
        self._stderr_suppress.__enter__()

        # ── Create tools & agents ─────────────────────────
        tools = create_tools(context, rag_retriever)
        crew_llm = create_crewai_llm(llm)

        proj_summary = context.get_project_summary()
        project_ctx = (
            f"{proj_summary.get('project_name', 'Unknown')} "
            f"at {proj_summary.get('source_path', '.')} "
            f"with {proj_summary.get('indexed_files', 0)} indexed files"
        )

        self.file_ops_agent = create_file_ops_agent(tools["file_ops"], crew_llm, project_ctx)
        self.code_gen_agent = create_code_gen_agent(tools["all"], crew_llm, project_ctx)
        self.code_analysis_agent = create_code_analysis_agent(tools["code_analysis"], crew_llm, project_ctx)
        self.coordinator = create_coordinator_agent(crew_llm, project_ctx)

    # ── Public API ────────────────────────────────────────

    def process_message(self, user_message: str) -> str:
        """Process a user message via CrewAI multi-agent pipeline."""
        try:
            proj_summary = self.context.get_project_summary()
            context_str = (
                f"Project: {proj_summary.get('project_name', 'Unknown')}, "
                f"Root: {proj_summary.get('source_path', '.')}, "
                f"Indexed files: {proj_summary.get('indexed_files', 0)}\n\n"
            )

            task = Task(
                description=f"{context_str}User Request: {user_message}",
                expected_output=(
                    "A specific, concrete answer based on actual project files and code. "
                    "Use tools to explore the project. For code changes, use write_file or "
                    "patch_file tools which automatically create backups. "
                    "Don't give generic explanations."
                ),
                agent=self.coordinator,
            )

            crew = Crew(
                agents=[
                    self.coordinator,
                    self.file_ops_agent,
                    self.code_gen_agent,
                    self.code_analysis_agent,
                ],
                tasks=[task],
                verbose=False,
                process="sequential",
            )

            result = crew.kickoff()
            return str(result.raw) if hasattr(result, "raw") else str(result)

        except Exception as e:
            return f"❌ Error: {e}\n\nPlease try rephrasing your request."

    # ── Rollback helpers (called from REPL) ───────────────

    def list_all_backups(self) -> List[Dict]:
        """Return all available file backups."""
        return list_backups()

    def rollback(self, file_path: str, timestamp: str | None = None) -> str:
        """Rollback a file to its backup.

        Args:
            file_path: The file to rollback (relative to the project root).
            timestamp: Specific backup ID, or None to use the most recent
                backup that recorded this file.

        Returns:
            Status message.
        """
        # BUG FIX: crew_tools.rollback_file takes (backup_id, rel_path,
        # target_root) and returns bool. The old code called it as
        # rollback_file(file_path, timestamp), which raises TypeError
        # (missing target_root) and could never produce a status string.
        backup_id = timestamp
        if backup_id is None:
            # list_backups() is newest-first; pick the first backup that
            # recorded this file in its manifest.
            for entry in list_backups():
                if any(f.get("file") == file_path for f in entry.get("files", [])):
                    backup_id = entry["id"]
                    break
        if backup_id is None:
            return f"❌ No backup found for {file_path}"

        if rollback_file(backup_id, file_path, self.context.source_path):
            return f"✅ Restored {file_path} from backup {backup_id}"
        return f"❌ Backup file not found: {backup_id}/{file_path}"

    # ── Stats ─────────────────────────────────────────────

    def get_stats(self) -> dict:
        """Return agent roles plus basic project info for display in the REPL."""
        return {
            "agents": {
                "coordinator": self.coordinator.role,
                "file_ops": self.file_ops_agent.role,
                "code_gen": self.code_gen_agent.role,
                "code_analysis": self.code_analysis_agent.role,
            },
            "project": self.context.project_name,
            "has_source_access": self.context.has_source_access,
        }
|
|
@@ -0,0 +1,497 @@
|
|
|
1
|
+
"""CrewAI tool wrappers for CodeGraph project context — full capabilities."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import shutil
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type
|
|
11
|
+
|
|
12
|
+
from crewai.tools import BaseTool
|
|
13
|
+
from pydantic import BaseModel, Field, PrivateAttr
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from .project_context import ProjectContext
|
|
17
|
+
from .rag import RAGRetriever
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# ═══════════════════════════════════════════════════════════════
|
|
21
|
+
# Backup / Rollback infrastructure
|
|
22
|
+
# ═══════════════════════════════════════════════════════════════
|
|
23
|
+
|
|
24
|
+
# Root directory for timestamped file backups. Lives under the user's home
# directory (not the project tree), so backups survive project re-indexing.
BACKUP_DIR = Path.home() / ".codegraph" / "backups"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _backup_file(source_path: Path, rel_path: str, tag: str = "") -> str:
|
|
28
|
+
"""Create a timestamped backup of a file before modification.
|
|
29
|
+
|
|
30
|
+
Returns:
|
|
31
|
+
Backup ID (timestamp-based) for rollback.
|
|
32
|
+
"""
|
|
33
|
+
ts = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
34
|
+
backup_id = f"{ts}_{tag}" if tag else ts
|
|
35
|
+
dest_dir = BACKUP_DIR / backup_id
|
|
36
|
+
dest_dir.mkdir(parents=True, exist_ok=True)
|
|
37
|
+
|
|
38
|
+
src = source_path / rel_path
|
|
39
|
+
if src.exists():
|
|
40
|
+
dest = dest_dir / rel_path
|
|
41
|
+
dest.parent.mkdir(parents=True, exist_ok=True)
|
|
42
|
+
shutil.copy2(src, dest)
|
|
43
|
+
|
|
44
|
+
# Write metadata
|
|
45
|
+
meta = dest_dir / ".backup_meta.json"
|
|
46
|
+
existing: list = []
|
|
47
|
+
if meta.exists():
|
|
48
|
+
existing = json.loads(meta.read_text())
|
|
49
|
+
existing.append({"file": rel_path, "timestamp": ts, "source": str(source_path)})
|
|
50
|
+
meta.write_text(json.dumps(existing, indent=2))
|
|
51
|
+
|
|
52
|
+
return backup_id
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def rollback_file(backup_id: str, rel_path: str, target_root: Path) -> bool:
    """Restore a file from a backup.

    Copies ``BACKUP_DIR/backup_id/rel_path`` back to
    ``target_root/rel_path``, creating parent directories as needed.

    Returns True if successfully restored.
    """
    backed_up = BACKUP_DIR / backup_id / rel_path
    if not backed_up.exists():
        return False

    restored = target_root / rel_path
    restored.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(backed_up, restored)
    return True
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def list_backups(limit: int = 20) -> List[Dict]:
    """Return recent backups, newest first, up to *limit* entries."""
    if not BACKUP_DIR.exists():
        return []

    entries: List[Dict] = []
    for bucket in sorted(BACKUP_DIR.iterdir(), reverse=True):
        if not bucket.is_dir():
            continue
        manifest = bucket / ".backup_meta.json"
        # Manifests are optional — legacy/partial backups report no files.
        recorded: list = json.loads(manifest.read_text()) if manifest.exists() else []
        entries.append({"id": bucket.name, "files": recorded})
        if len(entries) >= limit:
            break
    return entries
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# ═══════════════════════════════════════════════════════════════
|
|
89
|
+
# Tool input schemas (Pydantic v2)
|
|
90
|
+
# ═══════════════════════════════════════════════════════════════
|
|
91
|
+
|
|
92
|
+
class PathInput(BaseModel):
    # Args schema: optional directory path; "." (project root) when omitted.
    path: str = Field(default=".", description="Relative path inside the project")
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
class FilePathInput(BaseModel):
    # Args schema: a required file path, relative to the project root.
    path: str = Field(..., description="Relative file path inside the project")
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
class WriteFileInput(BaseModel):
    # Args schema for write_file: target path + the complete new content.
    path: str = Field(..., description="Relative file path to write")
    content: str = Field(..., description="Full file content to write")
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class PatchFileInput(BaseModel):
    # Args schema for patch_file: exact-match find/replace on one file.
    path: str = Field(..., description="Relative file path to patch")
    old_text: str = Field(..., description="Exact text to find in the file")
    new_text: str = Field(..., description="Replacement text")
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class QueryInput(BaseModel):
    # Args schema for semantic (RAG) code search.
    query: str = Field(..., description="Natural language search query")
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class GrepInput(BaseModel):
    # Args schema for grep_in_project: literal pattern + optional subdirectory.
    pattern: str = Field(..., description="Text or pattern to search for in files")
    path: str = Field(default=".", description="Directory to search in (relative)")
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class RollbackInput(BaseModel):
    # Args schema for rollback_file: which backup, and which file inside it.
    backup_id: str = Field(..., description="Backup ID to restore from")
    path: str = Field(..., description="Relative file path to restore")
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
# ═══════════════════════════════════════════════════════════════
|
|
126
|
+
# Base tool with context
|
|
127
|
+
# ═══════════════════════════════════════════════════════════════
|
|
128
|
+
|
|
129
|
+
class ContextTool(BaseTool):
    """Base class that holds a ProjectContext."""
    # Stored as a PrivateAttr so pydantic neither validates nor serializes
    # the (arbitrary) context object as part of the tool's schema.
    _context: Any = PrivateAttr()

    def __init__(self, context: Any, **kwargs):
        """Initialize the tool, stashing *context* outside the pydantic fields."""
        super().__init__(**kwargs)
        self._context = context
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
# ═══════════════════════════════════════════════════════════════
|
|
139
|
+
# FILE OPERATIONS TOOLS
|
|
140
|
+
# ═══════════════════════════════════════════════════════════════
|
|
141
|
+
|
|
142
|
+
class ListDirectoryTool(ContextTool):
    name: str = "list_directory"
    description: str = (
        "List files and directories in the project. "
        "Input: {\"path\": \".\"} (relative path, default root). "
        "Returns directory tree with file sizes."
    )
    args_schema: Type[BaseModel] = PathInput

    def _run(self, path: str = ".") -> str:
        """Render a one-level listing of *path*: directories first, then files with KB sizes."""
        try:
            items = self._context.list_directory(path)
            if not items:
                return f"Directory '{path}' is empty or does not exist."

            dirs = [i for i in items if i["type"] == "dir"]
            files = [i for i in items if i["type"] == "file"]
            lines = [f"📂 {path}/ ({len(dirs)} dirs, {len(files)} files)"]

            for d in sorted(dirs, key=lambda x: x["name"]):
                lines.append(f"  📁 {d['name']}/")
            for f in sorted(files, key=lambda x: x["name"]):
                # Missing/zero sizes render as 0.0 KB.
                kb = f["size"] / 1024 if f["size"] else 0
                lines.append(f"  📄 {f['name']} ({kb:.1f} KB)")
            return "\n".join(lines)
        except Exception as e:
            # Tools return error strings rather than raising, so the agent can recover.
            return f"Error listing directory: {e}"
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
class ReadFileTool(ContextTool):
    """Tool: return a file's full text with 1-based line numbers prefixed."""

    name: str = "read_file"
    description: str = (
        "Read the full contents of a file. "
        "Input: {\"path\": \"relative/file.py\"}. "
        "Returns file content with line numbers."
    )
    args_schema: Type[BaseModel] = FilePathInput

    def _run(self, path: str) -> str:
        try:
            raw = self._context.read_file(path)
            # Number every line so the agent can reference exact locations.
            numbered = "\n".join(
                f"{num:4d} │ {text}"
                for num, text in enumerate(raw.split("\n"), 1)
            )
            return f"── {path} ──\n" + numbered
        except Exception as e:
            return f"Error reading file: {e}"
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
class WriteFileTool(ContextTool):
    name: str = "write_file"
    description: str = (
        "Write/create a file in the project (creates backup first). "
        "Input: {\"path\": \"relative/file.py\", \"content\": \"file content\"}. "
        "Use this for creating new files or completely rewriting existing ones."
    )
    args_schema: Type[BaseModel] = WriteFileInput

    def _run(self, path: str, content: str) -> str:
        """Write *content* to *path*, backing up any pre-existing file first."""
        try:
            ctx = self._context
            # Backup existing file
            backup_id = ""
            if ctx.has_source_access and ctx.file_exists(path):
                backup_id = _backup_file(ctx.source_path, path, tag="write")

            ctx.write_file(path, content, create_dirs=True)
            msg = f"✅ Wrote {path} ({len(content)} chars)"
            # New files have no backup, so the hint is only shown for overwrites.
            if backup_id:
                msg += f"\n   Backup: {backup_id} (use rollback_file to undo)"
            return msg
        except Exception as e:
            return f"Error writing file: {e}"
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
class PatchFileTool(ContextTool):
    name: str = "patch_file"
    description: str = (
        "Modify a specific part of a file by replacing exact text (creates backup first). "
        "Input: {\"path\": \"file.py\", \"old_text\": \"text to find\", \"new_text\": \"replacement\"}. "
        "Use this for targeted edits instead of rewriting the whole file."
    )
    args_schema: Type[BaseModel] = PatchFileInput

    def _run(self, path: str, old_text: str, new_text: str) -> str:
        """Replace the FIRST occurrence of *old_text* in *path* with *new_text*."""
        try:
            ctx = self._context
            content = ctx.read_file(path)

            if old_text not in content:
                return f"❌ Could not find the specified text in {path}. Read the file first to get the exact text."

            # Total occurrence count is reported so the agent knows if the
            # match was ambiguous (only the first one is replaced below).
            count = content.count(old_text)
            # Backup
            backup_id = _backup_file(ctx.source_path, path, tag="patch")

            new_content = content.replace(old_text, new_text, 1)
            ctx.write_file(path, new_content)

            return (
                f"✅ Patched {path} ({count} occurrence(s) found, replaced first)\n"
                f"   Backup: {backup_id}"
            )
        except Exception as e:
            return f"Error patching file: {e}"
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
class DeleteFileTool(ContextTool):
    name: str = "delete_file"
    description: str = (
        "Delete a file from the project (creates backup first). "
        "Input: {\"path\": \"relative/file.py\"}."
    )
    args_schema: Type[BaseModel] = FilePathInput

    def _run(self, path: str) -> str:
        """Delete *path* after backing it up, so the deletion can be rolled back."""
        try:
            ctx = self._context
            if not ctx.file_exists(path):
                return f"❌ File not found: {path}"

            # Backup first
            backup_id = _backup_file(ctx.source_path, path, tag="delete")
            full_path = ctx.source_path / path
            full_path.unlink()
            return f"✅ Deleted {path}\n   Backup: {backup_id} (use rollback_file to restore)"
        except Exception as e:
            return f"Error deleting file: {e}"
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
class RollbackFileTool(ContextTool):
    """Tool: restore a previously backed-up file into the project tree."""

    name: str = "rollback_file"
    description: str = (
        "Restore a file from a previous backup. "
        "Input: {\"backup_id\": \"20260213_143000_write\", \"path\": \"file.py\"}."
    )
    args_schema: Type[BaseModel] = RollbackInput

    def _run(self, backup_id: str, path: str) -> str:
        try:
            ctx = self._context
            if not ctx.has_source_access:
                return "❌ No source access for rollback."
            restored = rollback_file(backup_id, path, ctx.source_path)
            return (
                f"✅ Restored {path} from backup {backup_id}"
                if restored
                else f"❌ Backup file not found: {backup_id}/{path}"
            )
        except Exception as e:
            return f"Error during rollback: {e}"
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
class ListBackupsTool(ContextTool):
    name: str = "list_backups"
    description: str = "List recent file backups for rollback. No input required."

    def _run(self, **kwargs) -> str:
        """Render the 10 most recent backups with the files each one recorded."""
        backups = list_backups(limit=10)
        if not backups:
            return "No backups found."
        lines = ["Recent backups:"]
        for b in backups:
            # Backup dirs without a manifest report no files — label them unknown.
            files_str = ", ".join(f["file"] for f in b["files"]) if b["files"] else "(unknown)"
            lines.append(f"  🔖 {b['id']} → {files_str}")
        return "\n".join(lines)
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
# ═══════════════════════════════════════════════════════════════
|
|
310
|
+
# CODE ANALYSIS TOOLS
|
|
311
|
+
# ═══════════════════════════════════════════════════════════════
|
|
312
|
+
|
|
313
|
+
class SearchCodeTool(BaseTool):
    name: str = "search_code"
    description: str = (
        "Semantic search across indexed code using RAG. "
        "Input: {\"query\": \"authentication login logic\"}. "
        "Returns matching functions, classes, and code snippets."
    )
    args_schema: Type[BaseModel] = QueryInput
    # RAG retriever held outside pydantic validation (extends BaseTool directly,
    # not ContextTool, since it needs the retriever rather than a ProjectContext).
    _rag: Any = PrivateAttr()

    def __init__(self, rag_retriever: Any, **kwargs):
        """Initialize with a RAGRetriever kept out of the pydantic model fields."""
        super().__init__(**kwargs)
        self._rag = rag_retriever

    def _run(self, query: str) -> str:
        """Return the top-8 semantic matches for *query* with location, score, preview."""
        try:
            results = self._rag.search(query, top_k=8)
            if not results:
                return f"No results found for: {query}"
            lines = [f"Found {len(results)} results for '{query}':\n"]
            for i, r in enumerate(results, 1):
                lines.append(
                    f"{i}. [{r.node_type}] {r.qualname}\n"
                    f"   File: {r.file_path}:{r.start_line}-{r.end_line}\n"
                    f"   Score: {r.score:.2f}\n"
                    f"   Preview: {r.snippet[:120]}…\n"
                )
            return "\n".join(lines)
        except Exception as e:
            return f"Error searching: {e}"
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
class GrepTool(ContextTool):
    name: str = "grep_in_project"
    description: str = (
        "Search for exact text across project files (like grep). "
        "Input: {\"pattern\": \"def my_function\", \"path\": \".\"}. "
        "Returns matching lines with file paths and line numbers."
    )
    args_schema: Type[BaseModel] = GrepInput

    def _run(self, pattern: str, path: str = ".") -> str:
        """Case-insensitive substring search (not regex) over text files under *path*.

        Output is capped at 30 matching lines; hidden directories and common
        build/vendor artifacts are skipped.
        """
        try:
            ctx = self._context
            if not ctx.has_source_access:
                return "❌ No source access."

            root = ctx.source_path / path
            matches = []
            max_results = 30

            for fpath in root.rglob("*"):
                if not fpath.is_file():
                    continue
                # Only scan known-text extensions.
                if fpath.suffix not in (".py", ".js", ".ts", ".json", ".toml", ".yaml", ".yml", ".md", ".txt", ".html", ".css", ".sh"):
                    continue
                # Skip hidden dirs, __pycache__, node_modules, .git
                parts = fpath.relative_to(ctx.source_path).parts
                if any(p.startswith(".") or p in ("__pycache__", "node_modules", ".git", "htmlcov") for p in parts):
                    continue
                try:
                    text = fpath.read_text(encoding="utf-8", errors="ignore")
                    for line_no, line in enumerate(text.split("\n"), 1):
                        if pattern.lower() in line.lower():
                            rel = str(fpath.relative_to(ctx.source_path))
                            matches.append(f"  {rel}:{line_no}  {line.strip()}")
                            if len(matches) >= max_results:
                                break
                except Exception:
                    # Unreadable file — skip it and keep scanning.
                    continue
                # Stop scanning further files once the cap is reached.
                if len(matches) >= max_results:
                    break

            if not matches:
                return f"No matches for '{pattern}'"
            return f"Found {len(matches)} match(es) for '{pattern}':\n" + "\n".join(matches)
        except Exception as e:
            return f"Error: {e}"
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
class ProjectSummaryTool(ContextTool):
    name: str = "get_project_summary"
    description: str = (
        "Get project statistics: file count, node/edge count, structure. "
        "No input required."
    )

    def _run(self, **kwargs) -> str:
        """Render the indexed project's headline stats and per-type symbol counts."""
        try:
            s = self._context.get_project_summary()
            lines = [
                f"Project: {s['project_name']}",
                f"Source: {s.get('source_path', 'N/A')}",
                f"Indexed: {s['indexed_files']} files, {s['total_nodes']} symbols, {s['total_edges']} edges",
            ]
            if s.get("node_types"):
                lines.append("Symbol types:")
                for ntype, count in sorted(s["node_types"].items()):
                    lines.append(f"  {ntype}: {count}")
            return "\n".join(lines)
        except Exception as e:
            return f"Error: {e}"
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
class FileTreeTool(ContextTool):
    name: str = "file_tree"
    description: str = (
        "Show the full project directory tree (recursive). "
        "Input: {\"path\": \".\"} (optional, defaults to root). "
        "Useful for understanding project structure quickly."
    )
    args_schema: Type[BaseModel] = PathInput

    def _run(self, path: str = ".") -> str:
        """Render a recursive tree rooted at *path* (max depth 4, max 100 lines)."""
        try:
            ctx = self._context
            if not ctx.has_source_access:
                return "❌ No source access."

            root = ctx.source_path / path
            lines = [f"📂 {path}/"]
            self._walk(root, ctx.source_path, lines, prefix="  ", depth=0, max_depth=4)
            # Keep output prompt-friendly: truncate overly long trees.
            if len(lines) > 100:
                lines = lines[:100]
                lines.append("  ... (truncated)")
            return "\n".join(lines)
        except Exception as e:
            return f"Error: {e}"

    @staticmethod
    def _walk(current: Path, root: Path, lines: list, prefix: str, depth: int, max_depth: int):
        """Append one line per entry under *current*, recursing into subdirectories."""
        if depth >= max_depth:
            return
        try:
            # Directories first, then files, each alphabetical (case-insensitive).
            entries = sorted(current.iterdir(), key=lambda p: (not p.is_dir(), p.name.lower()))
        except PermissionError:
            return

        for entry in entries:
            name = entry.name
            # Skip hidden entries and common build/vendor artifacts.
            if name.startswith(".") or name in ("__pycache__", "node_modules", ".git", "htmlcov"):
                continue
            if name.endswith(".egg-info"):
                continue

            if entry.is_dir():
                lines.append(f"{prefix}📁 {name}/")
                FileTreeTool._walk(entry, root, lines, prefix + "  ", depth + 1, max_depth)
            else:
                lines.append(f"{prefix}📄 {name}")
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
# ═══════════════════════════════════════════════════════════════
|
|
466
|
+
# TOOL FACTORY
|
|
467
|
+
# ═══════════════════════════════════════════════════════════════
|
|
468
|
+
|
|
469
|
+
def create_tools(context: Any, rag_retriever: Any) -> dict:
    """Create all tools for CrewAI agents.

    Returns dict with categorized tool lists:
        file_ops, code_analysis, all
    """
    # File operations (order matters only for display; kept stable).
    file_ops = [
        ListDirectoryTool(context=context),
        ReadFileTool(context=context),
        WriteFileTool(context=context),
        PatchFileTool(context=context),
        DeleteFileTool(context=context),
        RollbackFileTool(context=context),
        ListBackupsTool(context=context),
        FileTreeTool(context=context),
    ]

    # Code analysis
    code_analysis = [
        SearchCodeTool(rag_retriever=rag_retriever),
        GrepTool(context=context),
        ProjectSummaryTool(context=context),
    ]

    return {
        "file_ops": file_ops,
        "code_analysis": code_analysis,
        "all": file_ops + code_analysis,
    }
|