emdash_core-0.1.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emdash_core/__init__.py +3 -0
- emdash_core/agent/__init__.py +37 -0
- emdash_core/agent/agents.py +225 -0
- emdash_core/agent/code_reviewer.py +476 -0
- emdash_core/agent/compaction.py +143 -0
- emdash_core/agent/context_manager.py +140 -0
- emdash_core/agent/events.py +338 -0
- emdash_core/agent/handlers.py +224 -0
- emdash_core/agent/inprocess_subagent.py +377 -0
- emdash_core/agent/mcp/__init__.py +50 -0
- emdash_core/agent/mcp/client.py +346 -0
- emdash_core/agent/mcp/config.py +302 -0
- emdash_core/agent/mcp/manager.py +496 -0
- emdash_core/agent/mcp/tool_factory.py +213 -0
- emdash_core/agent/prompts/__init__.py +38 -0
- emdash_core/agent/prompts/main_agent.py +104 -0
- emdash_core/agent/prompts/subagents.py +131 -0
- emdash_core/agent/prompts/workflow.py +136 -0
- emdash_core/agent/providers/__init__.py +34 -0
- emdash_core/agent/providers/base.py +143 -0
- emdash_core/agent/providers/factory.py +80 -0
- emdash_core/agent/providers/models.py +220 -0
- emdash_core/agent/providers/openai_provider.py +463 -0
- emdash_core/agent/providers/transformers_provider.py +217 -0
- emdash_core/agent/research/__init__.py +81 -0
- emdash_core/agent/research/agent.py +143 -0
- emdash_core/agent/research/controller.py +254 -0
- emdash_core/agent/research/critic.py +428 -0
- emdash_core/agent/research/macros.py +469 -0
- emdash_core/agent/research/planner.py +449 -0
- emdash_core/agent/research/researcher.py +436 -0
- emdash_core/agent/research/state.py +523 -0
- emdash_core/agent/research/synthesizer.py +594 -0
- emdash_core/agent/reviewer_profile.py +475 -0
- emdash_core/agent/rules.py +123 -0
- emdash_core/agent/runner.py +601 -0
- emdash_core/agent/session.py +262 -0
- emdash_core/agent/spec_schema.py +66 -0
- emdash_core/agent/specification.py +479 -0
- emdash_core/agent/subagent.py +397 -0
- emdash_core/agent/subagent_prompts.py +13 -0
- emdash_core/agent/toolkit.py +482 -0
- emdash_core/agent/toolkits/__init__.py +64 -0
- emdash_core/agent/toolkits/base.py +96 -0
- emdash_core/agent/toolkits/explore.py +47 -0
- emdash_core/agent/toolkits/plan.py +55 -0
- emdash_core/agent/tools/__init__.py +141 -0
- emdash_core/agent/tools/analytics.py +436 -0
- emdash_core/agent/tools/base.py +131 -0
- emdash_core/agent/tools/coding.py +484 -0
- emdash_core/agent/tools/github_mcp.py +592 -0
- emdash_core/agent/tools/history.py +13 -0
- emdash_core/agent/tools/modes.py +153 -0
- emdash_core/agent/tools/plan.py +206 -0
- emdash_core/agent/tools/plan_write.py +135 -0
- emdash_core/agent/tools/search.py +412 -0
- emdash_core/agent/tools/spec.py +341 -0
- emdash_core/agent/tools/task.py +262 -0
- emdash_core/agent/tools/task_output.py +204 -0
- emdash_core/agent/tools/tasks.py +454 -0
- emdash_core/agent/tools/traversal.py +588 -0
- emdash_core/agent/tools/web.py +179 -0
- emdash_core/analytics/__init__.py +5 -0
- emdash_core/analytics/engine.py +1286 -0
- emdash_core/api/__init__.py +5 -0
- emdash_core/api/agent.py +308 -0
- emdash_core/api/agents.py +154 -0
- emdash_core/api/analyze.py +264 -0
- emdash_core/api/auth.py +173 -0
- emdash_core/api/context.py +77 -0
- emdash_core/api/db.py +121 -0
- emdash_core/api/embed.py +131 -0
- emdash_core/api/feature.py +143 -0
- emdash_core/api/health.py +93 -0
- emdash_core/api/index.py +162 -0
- emdash_core/api/plan.py +110 -0
- emdash_core/api/projectmd.py +210 -0
- emdash_core/api/query.py +320 -0
- emdash_core/api/research.py +122 -0
- emdash_core/api/review.py +161 -0
- emdash_core/api/router.py +76 -0
- emdash_core/api/rules.py +116 -0
- emdash_core/api/search.py +119 -0
- emdash_core/api/spec.py +99 -0
- emdash_core/api/swarm.py +223 -0
- emdash_core/api/tasks.py +109 -0
- emdash_core/api/team.py +120 -0
- emdash_core/auth/__init__.py +17 -0
- emdash_core/auth/github.py +389 -0
- emdash_core/config.py +74 -0
- emdash_core/context/__init__.py +52 -0
- emdash_core/context/models.py +50 -0
- emdash_core/context/providers/__init__.py +11 -0
- emdash_core/context/providers/base.py +74 -0
- emdash_core/context/providers/explored_areas.py +183 -0
- emdash_core/context/providers/touched_areas.py +360 -0
- emdash_core/context/registry.py +73 -0
- emdash_core/context/reranker.py +199 -0
- emdash_core/context/service.py +260 -0
- emdash_core/context/session.py +352 -0
- emdash_core/core/__init__.py +104 -0
- emdash_core/core/config.py +454 -0
- emdash_core/core/exceptions.py +55 -0
- emdash_core/core/models.py +265 -0
- emdash_core/core/review_config.py +57 -0
- emdash_core/db/__init__.py +67 -0
- emdash_core/db/auth.py +134 -0
- emdash_core/db/models.py +91 -0
- emdash_core/db/provider.py +222 -0
- emdash_core/db/providers/__init__.py +5 -0
- emdash_core/db/providers/supabase.py +452 -0
- emdash_core/embeddings/__init__.py +24 -0
- emdash_core/embeddings/indexer.py +534 -0
- emdash_core/embeddings/models.py +192 -0
- emdash_core/embeddings/providers/__init__.py +7 -0
- emdash_core/embeddings/providers/base.py +112 -0
- emdash_core/embeddings/providers/fireworks.py +141 -0
- emdash_core/embeddings/providers/openai.py +104 -0
- emdash_core/embeddings/registry.py +146 -0
- emdash_core/embeddings/service.py +215 -0
- emdash_core/graph/__init__.py +26 -0
- emdash_core/graph/builder.py +134 -0
- emdash_core/graph/connection.py +692 -0
- emdash_core/graph/schema.py +416 -0
- emdash_core/graph/writer.py +667 -0
- emdash_core/ingestion/__init__.py +7 -0
- emdash_core/ingestion/change_detector.py +150 -0
- emdash_core/ingestion/git/__init__.py +5 -0
- emdash_core/ingestion/git/commit_analyzer.py +196 -0
- emdash_core/ingestion/github/__init__.py +6 -0
- emdash_core/ingestion/github/pr_fetcher.py +296 -0
- emdash_core/ingestion/github/task_extractor.py +100 -0
- emdash_core/ingestion/orchestrator.py +540 -0
- emdash_core/ingestion/parsers/__init__.py +10 -0
- emdash_core/ingestion/parsers/base_parser.py +66 -0
- emdash_core/ingestion/parsers/call_graph_builder.py +121 -0
- emdash_core/ingestion/parsers/class_extractor.py +154 -0
- emdash_core/ingestion/parsers/function_extractor.py +202 -0
- emdash_core/ingestion/parsers/import_analyzer.py +119 -0
- emdash_core/ingestion/parsers/python_parser.py +123 -0
- emdash_core/ingestion/parsers/registry.py +72 -0
- emdash_core/ingestion/parsers/ts_ast_parser.js +313 -0
- emdash_core/ingestion/parsers/typescript_parser.py +278 -0
- emdash_core/ingestion/repository.py +346 -0
- emdash_core/models/__init__.py +38 -0
- emdash_core/models/agent.py +68 -0
- emdash_core/models/index.py +77 -0
- emdash_core/models/query.py +113 -0
- emdash_core/planning/__init__.py +7 -0
- emdash_core/planning/agent_api.py +413 -0
- emdash_core/planning/context_builder.py +265 -0
- emdash_core/planning/feature_context.py +232 -0
- emdash_core/planning/feature_expander.py +646 -0
- emdash_core/planning/llm_explainer.py +198 -0
- emdash_core/planning/similarity.py +509 -0
- emdash_core/planning/team_focus.py +821 -0
- emdash_core/server.py +153 -0
- emdash_core/sse/__init__.py +5 -0
- emdash_core/sse/stream.py +196 -0
- emdash_core/swarm/__init__.py +17 -0
- emdash_core/swarm/merge_agent.py +383 -0
- emdash_core/swarm/session_manager.py +274 -0
- emdash_core/swarm/swarm_runner.py +226 -0
- emdash_core/swarm/task_definition.py +137 -0
- emdash_core/swarm/worker_spawner.py +319 -0
- emdash_core/swarm/worktree_manager.py +278 -0
- emdash_core/templates/__init__.py +10 -0
- emdash_core/templates/defaults/agent-builder.md.template +82 -0
- emdash_core/templates/defaults/focus.md.template +115 -0
- emdash_core/templates/defaults/pr-review-enhanced.md.template +309 -0
- emdash_core/templates/defaults/pr-review.md.template +80 -0
- emdash_core/templates/defaults/project.md.template +85 -0
- emdash_core/templates/defaults/research_critic.md.template +112 -0
- emdash_core/templates/defaults/research_planner.md.template +85 -0
- emdash_core/templates/defaults/research_synthesizer.md.template +128 -0
- emdash_core/templates/defaults/reviewer.md.template +81 -0
- emdash_core/templates/defaults/spec.md.template +41 -0
- emdash_core/templates/defaults/tasks.md.template +78 -0
- emdash_core/templates/loader.py +296 -0
- emdash_core/utils/__init__.py +45 -0
- emdash_core/utils/git.py +84 -0
- emdash_core/utils/image.py +502 -0
- emdash_core/utils/logger.py +51 -0
- emdash_core-0.1.7.dist-info/METADATA +35 -0
- emdash_core-0.1.7.dist-info/RECORD +187 -0
- emdash_core-0.1.7.dist-info/WHEEL +4 -0
- emdash_core-0.1.7.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,588 @@
"""Graph traversal tools for exploring code relationships.

Note: These tools are now primarily provided by the emdash-graph MCP server.
This file contains fallback implementations for when MCP is not available.
"""

from typing import Optional

from .base import BaseTool, ToolResult, ToolCategory
from ...utils.logger import log


class ExpandNodeTool(BaseTool):
    """Expand a node to see its context and relationships."""

    name = "expand_node"
    description = """Get detailed information about a code entity and its immediate relationships.
Shows the entity's properties, connected nodes, and relevant context.
Useful for understanding what a function/class does and how it connects to other code."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        node_type: str,
        identifier: str,
        max_hops: int = 1,
    ) -> ToolResult:
        """Expand a node to see relationships.

        Args:
            node_type: Type of node (Function, Class, File)
            identifier: Qualified name or file path
            max_hops: How many relationship hops to include

        Returns:
            ToolResult with node details and relationships
        """
        try:
            # Query for the node and its relationships
            if node_type == "File":
                cypher = """
                MATCH (n:File {file_path: $identifier})
                OPTIONAL MATCH (n)-[r]->(m)
                RETURN n, collect({type: type(r), target: m}) as relationships
                """
            else:
                cypher = """
                MATCH (n {qualified_name: $identifier})
                WHERE $node_type IN labels(n)
                OPTIONAL MATCH (n)-[r]->(m)
                RETURN n, collect({type: type(r), target: m}) as relationships
                """

            with self.connection.session() as session:
                result = session.run(cypher, {
                    "identifier": identifier,
                    "node_type": node_type,
                })
                record = result.single()

                if not record:
                    return ToolResult.error_result(
                        f"{node_type} '{identifier}' not found",
                        suggestions=["Try semantic_search to find similar entities"],
                    )

                node = dict(record["n"])
                relationships = []

                for rel in record["relationships"]:
                    if rel["target"]:
                        target = dict(rel["target"])
                        relationships.append({
                            "type": rel["type"],
                            "target_name": target.get("qualified_name") or target.get("file_path"),
                            "target_type": target.get("node_type"),
                        })

                return ToolResult.success_result(
                    data={
                        "root_node": {
                            "qualified_name": node.get("qualified_name"),
                            "file_path": node.get("file_path"),
                            "node_type": node_type,
                            "docstring": node.get("docstring"),
                            "start_line": node.get("start_line"),
                            "end_line": node.get("end_line"),
                        },
                        "relationships": relationships,
                        "summary": {
                            "relationship_count": len(relationships),
                        },
                    },
                )

        except Exception as e:
            log.exception("Expand node failed")
            return ToolResult.error_result(f"Expansion failed: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "node_type": {
                    "type": "string",
                    "enum": ["Function", "Class", "File"],
                    "description": "Type of node to expand",
                },
                "identifier": {
                    "type": "string",
                    "description": "Qualified name (for functions/classes) or file path (for files)",
                },
                "max_hops": {
                    "type": "integer",
                    "description": "How many relationship hops to include",
                    "default": 1,
                },
            },
            required=["node_type", "identifier"],
        )


class GetCallersTool(BaseTool):
    """Find functions that call a given function."""

    name = "get_callers"
    description = """Find all functions that call the specified function.
Useful for understanding the impact of changes and finding usage patterns."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        qualified_name: str,
        limit: int = 20,
    ) -> ToolResult:
        """Get callers of a function.

        Args:
            qualified_name: Qualified name of the function
            limit: Maximum callers to return

        Returns:
            ToolResult with caller information
        """
        try:
            cypher = """
            MATCH (caller:Function)-[:CALLS]->(callee:Function {qualified_name: $qualified_name})
            RETURN caller.qualified_name as qualified_name,
                   caller.file_path as file_path
            LIMIT $limit
            """

            callers = []
            with self.connection.session() as session:
                result = session.run(cypher, {
                    "qualified_name": qualified_name,
                    "limit": limit,
                })
                for record in result:
                    callers.append({
                        "qualified_name": record["qualified_name"],
                        "file_path": record["file_path"],
                    })

            return ToolResult.success_result(
                data={
                    "function": qualified_name,
                    "callers": callers,
                    "count": len(callers),
                },
            )

        except Exception as e:
            log.exception("Get callers failed")
            return ToolResult.error_result(f"Failed to get callers: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "qualified_name": {
                    "type": "string",
                    "description": "Qualified name of the function",
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum callers to return",
                    "default": 20,
                },
            },
            required=["qualified_name"],
        )


class GetCalleesTool(BaseTool):
    """Find functions called by a given function."""

    name = "get_callees"
    description = """Find all functions that the specified function calls.
Useful for understanding a function's dependencies."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        qualified_name: str,
        limit: int = 20,
    ) -> ToolResult:
        """Get callees of a function.

        Args:
            qualified_name: Qualified name of the function
            limit: Maximum callees to return

        Returns:
            ToolResult with callee information
        """
        try:
            cypher = """
            MATCH (caller:Function {qualified_name: $qualified_name})-[:CALLS]->(callee:Function)
            RETURN callee.qualified_name as qualified_name,
                   callee.file_path as file_path
            LIMIT $limit
            """

            callees = []
            with self.connection.session() as session:
                result = session.run(cypher, {
                    "qualified_name": qualified_name,
                    "limit": limit,
                })
                for record in result:
                    callees.append({
                        "qualified_name": record["qualified_name"],
                        "file_path": record["file_path"],
                    })

            return ToolResult.success_result(
                data={
                    "function": qualified_name,
                    "callees": callees,
                    "count": len(callees),
                },
            )

        except Exception as e:
            log.exception("Get callees failed")
            return ToolResult.error_result(f"Failed to get callees: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "qualified_name": {
                    "type": "string",
                    "description": "Qualified name of the function",
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum callees to return",
                    "default": 20,
                },
            },
            required=["qualified_name"],
        )


class GetClassHierarchyTool(BaseTool):
    """Get the inheritance hierarchy for a class."""

    name = "get_class_hierarchy"
    description = """Get the inheritance hierarchy for a class.
Shows parent classes (bases) and child classes (subclasses)."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        class_name: str,
    ) -> ToolResult:
        """Get class hierarchy.

        Args:
            class_name: Qualified name of the class

        Returns:
            ToolResult with hierarchy information
        """
        try:
            # Get bases (parents)
            bases_query = """
            MATCH (c:Class {qualified_name: $class_name})-[:INHERITS_FROM]->(base:Class)
            RETURN base.qualified_name as qualified_name
            """

            # Get subclasses (children)
            children_query = """
            MATCH (child:Class)-[:INHERITS_FROM]->(c:Class {qualified_name: $class_name})
            RETURN child.qualified_name as qualified_name
            """

            bases = []
            children = []

            with self.connection.session() as session:
                result = session.run(bases_query, {"class_name": class_name})
                for record in result:
                    bases.append(record["qualified_name"])

                result = session.run(children_query, {"class_name": class_name})
                for record in result:
                    children.append(record["qualified_name"])

            return ToolResult.success_result(
                data={
                    "class": class_name,
                    "bases": bases,
                    "subclasses": children,
                },
            )

        except Exception as e:
            log.exception("Get class hierarchy failed")
            return ToolResult.error_result(f"Failed to get hierarchy: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "class_name": {
                    "type": "string",
                    "description": "Qualified name of the class",
                },
            },
            required=["class_name"],
        )


class GetFileDependenciesTool(BaseTool):
    """Get import/export dependencies for a file."""

    name = "get_file_dependencies"
    description = """Get the import and export dependencies for a file.
Shows which files this file imports from and which files import from it."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        file_path: str,
    ) -> ToolResult:
        """Get file dependencies.

        Args:
            file_path: Path to the file

        Returns:
            ToolResult with dependency information
        """
        try:
            # Get imports (files this file depends on)
            imports_query = """
            MATCH (f:File {file_path: $file_path})-[:IMPORTS]->(imported:File)
            RETURN imported.file_path as file_path
            """

            # Get importers (files that depend on this file)
            importers_query = """
            MATCH (importer:File)-[:IMPORTS]->(f:File {file_path: $file_path})
            RETURN importer.file_path as file_path
            """

            imports = []
            importers = []

            with self.connection.session() as session:
                result = session.run(imports_query, {"file_path": file_path})
                for record in result:
                    imports.append(record["file_path"])

                result = session.run(importers_query, {"file_path": file_path})
                for record in result:
                    importers.append(record["file_path"])

            return ToolResult.success_result(
                data={
                    "file": file_path,
                    "imports": imports,
                    "imported_by": importers,
                },
            )

        except Exception as e:
            log.exception("Get file dependencies failed")
            return ToolResult.error_result(f"Failed to get dependencies: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "file_path": {
                    "type": "string",
                    "description": "Path to the file",
                },
            },
            required=["file_path"],
        )


class GetImpactAnalysisTool(BaseTool):
    """Analyze the impact of changing a code entity."""

    name = "get_impact_analysis"
    description = """Analyze the potential impact of changing a code entity.
Shows affected files, callers, and risk assessment."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        entity_type: str,
        identifier: str,
        depth: int = 2,
    ) -> ToolResult:
        """Analyze change impact.

        Args:
            entity_type: Type of entity (Function, Class, File)
            identifier: Qualified name or file path
            depth: How many levels of dependencies to analyze

        Returns:
            ToolResult with impact analysis
        """
        try:
            affected_files = set()
            affected_functions = set()

            # Simple impact analysis based on callers
            if entity_type == "Function":
                cypher = """
                MATCH (caller)-[:CALLS*1..%d]->(f:Function {qualified_name: $identifier})
                RETURN DISTINCT caller.file_path as file_path,
                       caller.qualified_name as qualified_name
                """ % depth

                with self.connection.session() as session:
                    result = session.run(cypher, {"identifier": identifier})
                    for record in result:
                        if record["file_path"]:
                            affected_files.add(record["file_path"])
                        if record["qualified_name"]:
                            affected_functions.add(record["qualified_name"])

            # Determine risk level
            num_affected = len(affected_files) + len(affected_functions)
            if num_affected > 20:
                risk_level = "high"
            elif num_affected > 5:
                risk_level = "medium"
            else:
                risk_level = "low"

            return ToolResult.success_result(
                data={
                    "entity": identifier,
                    "affected_files": list(affected_files)[:50],
                    "affected_functions": list(affected_functions)[:50],
                    "risk_level": risk_level,
                    "total_affected": num_affected,
                },
            )

        except Exception as e:
            log.exception("Impact analysis failed")
            return ToolResult.error_result(f"Impact analysis failed: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "entity_type": {
                    "type": "string",
                    "enum": ["Function", "Class", "File"],
                    "description": "Type of entity to analyze",
                },
                "identifier": {
                    "type": "string",
                    "description": "Qualified name or file path",
                },
                "depth": {
                    "type": "integer",
                    "description": "Levels of dependencies to analyze",
                    "default": 2,
                },
            },
            required=["entity_type", "identifier"],
        )


class GetNeighborsTool(BaseTool):
    """Get immediate neighbors of a node in the graph."""

    name = "get_neighbors"
    description = """Get all immediate neighbors of a node in the code graph.
Shows all directly connected entities regardless of relationship type."""
    category = ToolCategory.TRAVERSAL

    def execute(
        self,
        node_type: str,
        identifier: str,
        limit: int = 30,
    ) -> ToolResult:
        """Get node neighbors.

        Args:
            node_type: Type of node
            identifier: Qualified name or file path
            limit: Maximum neighbors to return

        Returns:
            ToolResult with neighbor information
        """
        try:
            if node_type == "File":
                cypher = """
                MATCH (n:File {file_path: $identifier})-[r]-(neighbor)
                RETURN DISTINCT type(r) as relationship,
                       labels(neighbor)[0] as neighbor_type,
                       neighbor.qualified_name as qualified_name,
                       neighbor.file_path as file_path
                LIMIT $limit
                """
            else:
                cypher = """
                MATCH (n {qualified_name: $identifier})-[r]-(neighbor)
                WHERE $node_type IN labels(n)
                RETURN DISTINCT type(r) as relationship,
                       labels(neighbor)[0] as neighbor_type,
                       neighbor.qualified_name as qualified_name,
                       neighbor.file_path as file_path
                LIMIT $limit
                """

            neighbors = []
            with self.connection.session() as session:
                result = session.run(cypher, {
                    "identifier": identifier,
                    "node_type": node_type,
                    "limit": limit,
                })
                for record in result:
                    neighbors.append({
                        "relationship": record["relationship"],
                        "type": record["neighbor_type"],
                        "identifier": record["qualified_name"] or record["file_path"],
                    })

            return ToolResult.success_result(
                data={
                    "node": identifier,
                    "neighbors": neighbors,
                    "count": len(neighbors),
                },
            )

        except Exception as e:
            log.exception("Get neighbors failed")
            return ToolResult.error_result(f"Failed to get neighbors: {str(e)}")

    def get_schema(self) -> dict:
        """Get OpenAI function schema."""
        return self._make_schema(
            properties={
                "node_type": {
                    "type": "string",
                    "enum": ["Function", "Class", "File"],
                    "description": "Type of node",
                },
                "identifier": {
                    "type": "string",
                    "description": "Qualified name or file path",
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum neighbors to return",
                    "default": 30,
                },
            },
            required=["node_type", "identifier"],
        )