emdash-core 0.1.7 (emdash_core-0.1.7-py3-none-any.whl)
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emdash_core/__init__.py +3 -0
- emdash_core/agent/__init__.py +37 -0
- emdash_core/agent/agents.py +225 -0
- emdash_core/agent/code_reviewer.py +476 -0
- emdash_core/agent/compaction.py +143 -0
- emdash_core/agent/context_manager.py +140 -0
- emdash_core/agent/events.py +338 -0
- emdash_core/agent/handlers.py +224 -0
- emdash_core/agent/inprocess_subagent.py +377 -0
- emdash_core/agent/mcp/__init__.py +50 -0
- emdash_core/agent/mcp/client.py +346 -0
- emdash_core/agent/mcp/config.py +302 -0
- emdash_core/agent/mcp/manager.py +496 -0
- emdash_core/agent/mcp/tool_factory.py +213 -0
- emdash_core/agent/prompts/__init__.py +38 -0
- emdash_core/agent/prompts/main_agent.py +104 -0
- emdash_core/agent/prompts/subagents.py +131 -0
- emdash_core/agent/prompts/workflow.py +136 -0
- emdash_core/agent/providers/__init__.py +34 -0
- emdash_core/agent/providers/base.py +143 -0
- emdash_core/agent/providers/factory.py +80 -0
- emdash_core/agent/providers/models.py +220 -0
- emdash_core/agent/providers/openai_provider.py +463 -0
- emdash_core/agent/providers/transformers_provider.py +217 -0
- emdash_core/agent/research/__init__.py +81 -0
- emdash_core/agent/research/agent.py +143 -0
- emdash_core/agent/research/controller.py +254 -0
- emdash_core/agent/research/critic.py +428 -0
- emdash_core/agent/research/macros.py +469 -0
- emdash_core/agent/research/planner.py +449 -0
- emdash_core/agent/research/researcher.py +436 -0
- emdash_core/agent/research/state.py +523 -0
- emdash_core/agent/research/synthesizer.py +594 -0
- emdash_core/agent/reviewer_profile.py +475 -0
- emdash_core/agent/rules.py +123 -0
- emdash_core/agent/runner.py +601 -0
- emdash_core/agent/session.py +262 -0
- emdash_core/agent/spec_schema.py +66 -0
- emdash_core/agent/specification.py +479 -0
- emdash_core/agent/subagent.py +397 -0
- emdash_core/agent/subagent_prompts.py +13 -0
- emdash_core/agent/toolkit.py +482 -0
- emdash_core/agent/toolkits/__init__.py +64 -0
- emdash_core/agent/toolkits/base.py +96 -0
- emdash_core/agent/toolkits/explore.py +47 -0
- emdash_core/agent/toolkits/plan.py +55 -0
- emdash_core/agent/tools/__init__.py +141 -0
- emdash_core/agent/tools/analytics.py +436 -0
- emdash_core/agent/tools/base.py +131 -0
- emdash_core/agent/tools/coding.py +484 -0
- emdash_core/agent/tools/github_mcp.py +592 -0
- emdash_core/agent/tools/history.py +13 -0
- emdash_core/agent/tools/modes.py +153 -0
- emdash_core/agent/tools/plan.py +206 -0
- emdash_core/agent/tools/plan_write.py +135 -0
- emdash_core/agent/tools/search.py +412 -0
- emdash_core/agent/tools/spec.py +341 -0
- emdash_core/agent/tools/task.py +262 -0
- emdash_core/agent/tools/task_output.py +204 -0
- emdash_core/agent/tools/tasks.py +454 -0
- emdash_core/agent/tools/traversal.py +588 -0
- emdash_core/agent/tools/web.py +179 -0
- emdash_core/analytics/__init__.py +5 -0
- emdash_core/analytics/engine.py +1286 -0
- emdash_core/api/__init__.py +5 -0
- emdash_core/api/agent.py +308 -0
- emdash_core/api/agents.py +154 -0
- emdash_core/api/analyze.py +264 -0
- emdash_core/api/auth.py +173 -0
- emdash_core/api/context.py +77 -0
- emdash_core/api/db.py +121 -0
- emdash_core/api/embed.py +131 -0
- emdash_core/api/feature.py +143 -0
- emdash_core/api/health.py +93 -0
- emdash_core/api/index.py +162 -0
- emdash_core/api/plan.py +110 -0
- emdash_core/api/projectmd.py +210 -0
- emdash_core/api/query.py +320 -0
- emdash_core/api/research.py +122 -0
- emdash_core/api/review.py +161 -0
- emdash_core/api/router.py +76 -0
- emdash_core/api/rules.py +116 -0
- emdash_core/api/search.py +119 -0
- emdash_core/api/spec.py +99 -0
- emdash_core/api/swarm.py +223 -0
- emdash_core/api/tasks.py +109 -0
- emdash_core/api/team.py +120 -0
- emdash_core/auth/__init__.py +17 -0
- emdash_core/auth/github.py +389 -0
- emdash_core/config.py +74 -0
- emdash_core/context/__init__.py +52 -0
- emdash_core/context/models.py +50 -0
- emdash_core/context/providers/__init__.py +11 -0
- emdash_core/context/providers/base.py +74 -0
- emdash_core/context/providers/explored_areas.py +183 -0
- emdash_core/context/providers/touched_areas.py +360 -0
- emdash_core/context/registry.py +73 -0
- emdash_core/context/reranker.py +199 -0
- emdash_core/context/service.py +260 -0
- emdash_core/context/session.py +352 -0
- emdash_core/core/__init__.py +104 -0
- emdash_core/core/config.py +454 -0
- emdash_core/core/exceptions.py +55 -0
- emdash_core/core/models.py +265 -0
- emdash_core/core/review_config.py +57 -0
- emdash_core/db/__init__.py +67 -0
- emdash_core/db/auth.py +134 -0
- emdash_core/db/models.py +91 -0
- emdash_core/db/provider.py +222 -0
- emdash_core/db/providers/__init__.py +5 -0
- emdash_core/db/providers/supabase.py +452 -0
- emdash_core/embeddings/__init__.py +24 -0
- emdash_core/embeddings/indexer.py +534 -0
- emdash_core/embeddings/models.py +192 -0
- emdash_core/embeddings/providers/__init__.py +7 -0
- emdash_core/embeddings/providers/base.py +112 -0
- emdash_core/embeddings/providers/fireworks.py +141 -0
- emdash_core/embeddings/providers/openai.py +104 -0
- emdash_core/embeddings/registry.py +146 -0
- emdash_core/embeddings/service.py +215 -0
- emdash_core/graph/__init__.py +26 -0
- emdash_core/graph/builder.py +134 -0
- emdash_core/graph/connection.py +692 -0
- emdash_core/graph/schema.py +416 -0
- emdash_core/graph/writer.py +667 -0
- emdash_core/ingestion/__init__.py +7 -0
- emdash_core/ingestion/change_detector.py +150 -0
- emdash_core/ingestion/git/__init__.py +5 -0
- emdash_core/ingestion/git/commit_analyzer.py +196 -0
- emdash_core/ingestion/github/__init__.py +6 -0
- emdash_core/ingestion/github/pr_fetcher.py +296 -0
- emdash_core/ingestion/github/task_extractor.py +100 -0
- emdash_core/ingestion/orchestrator.py +540 -0
- emdash_core/ingestion/parsers/__init__.py +10 -0
- emdash_core/ingestion/parsers/base_parser.py +66 -0
- emdash_core/ingestion/parsers/call_graph_builder.py +121 -0
- emdash_core/ingestion/parsers/class_extractor.py +154 -0
- emdash_core/ingestion/parsers/function_extractor.py +202 -0
- emdash_core/ingestion/parsers/import_analyzer.py +119 -0
- emdash_core/ingestion/parsers/python_parser.py +123 -0
- emdash_core/ingestion/parsers/registry.py +72 -0
- emdash_core/ingestion/parsers/ts_ast_parser.js +313 -0
- emdash_core/ingestion/parsers/typescript_parser.py +278 -0
- emdash_core/ingestion/repository.py +346 -0
- emdash_core/models/__init__.py +38 -0
- emdash_core/models/agent.py +68 -0
- emdash_core/models/index.py +77 -0
- emdash_core/models/query.py +113 -0
- emdash_core/planning/__init__.py +7 -0
- emdash_core/planning/agent_api.py +413 -0
- emdash_core/planning/context_builder.py +265 -0
- emdash_core/planning/feature_context.py +232 -0
- emdash_core/planning/feature_expander.py +646 -0
- emdash_core/planning/llm_explainer.py +198 -0
- emdash_core/planning/similarity.py +509 -0
- emdash_core/planning/team_focus.py +821 -0
- emdash_core/server.py +153 -0
- emdash_core/sse/__init__.py +5 -0
- emdash_core/sse/stream.py +196 -0
- emdash_core/swarm/__init__.py +17 -0
- emdash_core/swarm/merge_agent.py +383 -0
- emdash_core/swarm/session_manager.py +274 -0
- emdash_core/swarm/swarm_runner.py +226 -0
- emdash_core/swarm/task_definition.py +137 -0
- emdash_core/swarm/worker_spawner.py +319 -0
- emdash_core/swarm/worktree_manager.py +278 -0
- emdash_core/templates/__init__.py +10 -0
- emdash_core/templates/defaults/agent-builder.md.template +82 -0
- emdash_core/templates/defaults/focus.md.template +115 -0
- emdash_core/templates/defaults/pr-review-enhanced.md.template +309 -0
- emdash_core/templates/defaults/pr-review.md.template +80 -0
- emdash_core/templates/defaults/project.md.template +85 -0
- emdash_core/templates/defaults/research_critic.md.template +112 -0
- emdash_core/templates/defaults/research_planner.md.template +85 -0
- emdash_core/templates/defaults/research_synthesizer.md.template +128 -0
- emdash_core/templates/defaults/reviewer.md.template +81 -0
- emdash_core/templates/defaults/spec.md.template +41 -0
- emdash_core/templates/defaults/tasks.md.template +78 -0
- emdash_core/templates/loader.py +296 -0
- emdash_core/utils/__init__.py +45 -0
- emdash_core/utils/git.py +84 -0
- emdash_core/utils/image.py +502 -0
- emdash_core/utils/logger.py +51 -0
- emdash_core-0.1.7.dist-info/METADATA +35 -0
- emdash_core-0.1.7.dist-info/RECORD +187 -0
- emdash_core-0.1.7.dist-info/WHEEL +4 -0
- emdash_core-0.1.7.dist-info/entry_points.txt +3 -0
emdash_core/api/projectmd.py
ADDED
@@ -0,0 +1,210 @@
+"""PROJECT.md generation API endpoint with SSE streaming."""
+
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi.responses import StreamingResponse
+from pydantic import BaseModel
+
+from ..sse.stream import SSEHandler, EventType
+
+router = APIRouter(prefix="/projectmd", tags=["projectmd"])
+
+# Thread pool for running blocking agent code
+_executor = ThreadPoolExecutor(max_workers=2)
+
+
+class ProjectMDRequest(BaseModel):
+    """Request for PROJECT.md generation."""
+    output: str = "PROJECT.md"
+    save: bool = True
+    model: Optional[str] = None
+
+
+class ProjectMDResponse(BaseModel):
+    """Response from PROJECT.md generation."""
+    success: bool
+    content: Optional[str] = None
+    output_path: Optional[str] = None
+    error: Optional[str] = None
+
+
+def _generate_projectmd_sync(
+    output: str,
+    save: bool,
+    model: str,
+    sse_handler: SSEHandler,
+):
+    """Run PROJECT.md generation synchronously (in thread pool).
+
+    This function runs in a background thread and emits events
+    to the SSE handler for streaming to the client.
+    """
+    try:
+        # Import discovery agent
+        from ..agent.discovery import ProjectDiscoveryAgent
+
+        # Emit start
+        sse_handler.emit(EventType.SESSION_START, {
+            "agent_name": "Project Discovery",
+            "model": model,
+            "output": output,
+        })
+
+        # Create and run discovery agent
+        agent = ProjectDiscoveryAgent(
+            model=model,
+            verbose=True,
+        )
+
+        # Generate PROJECT.md content
+        content = agent.run()
+
+        # Save if requested
+        output_path = None
+        if save and content:
+            from pathlib import Path
+            output_path = Path(output)
+            output_path.write_text(content)
+            sse_handler.emit(EventType.THINKING, {
+                "message": f"Saved to {output_path}",
+            })
+
+        # Emit final response
+        sse_handler.emit(EventType.RESPONSE, {
+            "content": content,
+            "saved": save,
+            "output_path": str(output_path) if output_path else None,
+        })
+
+        return content
+
+    except ImportError as e:
+        # Discovery agent not available, use simpler approach
+        sse_handler.emit(EventType.WARNING, {
+            "message": f"Discovery agent not available: {e}. Using basic generation.",
+        })
+
+        try:
+            from ..agent.runner import AgentRunner
+            from ..agent.events import AgentEventEmitter
+
+            # Create emitter that forwards to SSE handler
+            class SSEBridgeHandler:
+                def __init__(self, sse_handler: SSEHandler):
+                    self._sse = sse_handler
+
+                def handle(self, event):
+                    self._sse.handle(event)
+
+            emitter = AgentEventEmitter(agent_name="Project Discovery")
+            emitter.add_handler(SSEBridgeHandler(sse_handler))
+
+            runner = AgentRunner(
+                model=model,
+                verbose=True,
+                max_iterations=30,
+                emitter=emitter,
+            )
+
+            # Use a simple prompt for PROJECT.md generation
+            prompt = """Explore this codebase and generate a PROJECT.md document that describes:
+1. What this project is and does
+2. The main architecture and components
+3. Key files and their purposes
+4. How to get started
+
+Use the available tools to explore the codebase, then write a comprehensive PROJECT.md."""
+
+            content = runner.run(prompt)
+
+            if save and content:
+                from pathlib import Path
+                output_path = Path(output)
+                output_path.write_text(content)
+
+            return content
+
+        except Exception as inner_e:
+            sse_handler.emit(EventType.ERROR, {
+                "message": str(inner_e),
+            })
+            raise
+
+    except Exception as e:
+        sse_handler.emit(EventType.ERROR, {
+            "message": str(e),
+        })
+        raise
+
+
+async def _generate_projectmd_async(
+    request: ProjectMDRequest,
+    sse_handler: SSEHandler,
+):
+    """Generate PROJECT.md and stream events."""
+    from ..config import get_config
+
+    config = get_config()
+    model = request.model or config.default_model
+
+    loop = asyncio.get_event_loop()
+
+    try:
+        await loop.run_in_executor(
+            _executor,
+            _generate_projectmd_sync,
+            request.output,
+            request.save,
+            model,
+            sse_handler,
+        )
+
+        sse_handler.emit(EventType.SESSION_END, {
+            "success": True,
+        })
+
+    except Exception as e:
+        sse_handler.emit(EventType.SESSION_END, {
+            "success": False,
+            "error": str(e),
+        })
+
+    finally:
+        sse_handler.close()
+
+
+@router.post("/generate")
+async def generate_projectmd(request: ProjectMDRequest):
+    """Generate PROJECT.md by exploring the codebase.
+
+    Uses AI to analyze the code graph and generate a comprehensive
+    project document that describes architecture, patterns, and
+    key components.
+
+    The response is a Server-Sent Events stream containing:
+    - session_start: Initial session info
+    - tool_start/tool_result: Exploration progress
+    - thinking: Agent reasoning
+    - response: Final PROJECT.md content
+    - session_end: Completion status
+
+    Example:
+        curl -N -X POST http://localhost:8765/api/projectmd/generate \\
+            -H "Content-Type: application/json" \\
+            -d '{"output": "PROJECT.md", "save": true}'
+    """
+    sse_handler = SSEHandler(agent_name="Project Discovery")
+
+    asyncio.create_task(_generate_projectmd_async(request, sse_handler))
+
+    return StreamingResponse(
+        sse_handler,
+        media_type="text/event-stream",
+        headers={
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+        },
+    )
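For reference, a client consuming this endpoint reads the SSE stream until it closes. The sketch below is illustrative only: it assumes the server is reachable at http://localhost:8765 as in the docstring's curl example, uses httpx (not a dependency shown in this diff), and simply prints each frame because the exact wire format produced by SSEHandler is defined elsewhere in the package.

```python
# Hypothetical client for POST /api/projectmd/generate (not part of the package).
# Assumptions: server on localhost:8765, httpx installed, SSE frames arriving as
# "event: ..." / "data: ..." lines from SSEHandler.
import httpx


def stream_projectmd(base_url: str = "http://localhost:8765") -> None:
    payload = {"output": "PROJECT.md", "save": True}
    with httpx.stream(
        "POST",
        f"{base_url}/api/projectmd/generate",
        json=payload,
        timeout=None,  # generation may take a while; disable the read timeout
    ) as response:
        response.raise_for_status()
        for line in response.iter_lines():
            # Print each SSE frame as it arrives; a real client would parse the
            # "data:" payloads and dispatch on the event type.
            if line.strip():
                print(line)


if __name__ == "__main__":
    stream_projectmd()
```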
emdash_core/api/query.py
ADDED
@@ -0,0 +1,320 @@
+"""Graph query endpoints."""
+
+from typing import Optional
+
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel, Field
+
+router = APIRouter(prefix="/query", tags=["query"])
+
+
+class EntityResult(BaseModel):
+    """A code entity result."""
+    qualified_name: str
+    name: str
+    entity_type: str
+    file_path: str
+    line_number: Optional[int] = None
+    source: Optional[str] = None
+
+
+class ExpandRequest(BaseModel):
+    """Request to expand a node."""
+    entity_type: str = Field(..., description="Type: File, Class, Function")
+    qualified_name: str = Field(..., description="Qualified name of entity")
+    max_hops: int = Field(default=2, description="Max traversal depth")
+    include_source: bool = Field(default=True, description="Include source code")
+
+
+class ExpandResponse(BaseModel):
+    """Expanded node with relationships."""
+    entity: EntityResult
+    callers: list[EntityResult] = Field(default_factory=list)
+    callees: list[EntityResult] = Field(default_factory=list)
+    dependencies: list[EntityResult] = Field(default_factory=list)
+    dependents: list[EntityResult] = Field(default_factory=list)
+
+
+class KnowledgeSilo(BaseModel):
+    """A knowledge silo - critical code with few maintainers."""
+    file_path: str
+    importance_score: float
+    author_count: int
+    authors: list[str]
+    function_count: int
+
+
+def _get_toolkit():
+    """Get agent toolkit."""
+    from ..agent.toolkit import AgentToolkit
+    return AgentToolkit()
+
+
+@router.get("/find-class/{name}")
+async def find_class(name: str):
+    """Find a class by name."""
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.text_search(query=name, entity_types=["Class"])
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return {
+            "results": [
+                EntityResult(
+                    qualified_name=r.get("qualified_name", ""),
+                    name=r.get("name", ""),
+                    entity_type="Class",
+                    file_path=r.get("file_path", ""),
+                    line_number=r.get("line_number"),
+                )
+                for r in result.data.get("results", [])
+            ]
+        }
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/find-function/{name}")
+async def find_function(name: str):
+    """Find a function by name."""
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.text_search(query=name, entity_types=["Function"])
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return {
+            "results": [
+                EntityResult(
+                    qualified_name=r.get("qualified_name", ""),
+                    name=r.get("name", ""),
+                    entity_type="Function",
+                    file_path=r.get("file_path", ""),
+                    line_number=r.get("line_number"),
+                )
+                for r in result.data.get("results", [])
+            ]
+        }
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/expand", response_model=ExpandResponse)
+async def expand_node(request: ExpandRequest):
+    """Expand a node to see its relationships.
+
+    Returns callers, callees, dependencies, and dependents.
+    """
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.expand(
+            entity_type=request.entity_type,
+            qualified_name=request.qualified_name,
+        )
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        data = result.data
+        entity_data = data.get("entity", {})
+
+        return ExpandResponse(
+            entity=EntityResult(
+                qualified_name=entity_data.get("qualified_name", request.qualified_name),
+                name=entity_data.get("name", ""),
+                entity_type=request.entity_type,
+                file_path=entity_data.get("file_path", ""),
+                line_number=entity_data.get("line_number"),
+                source=entity_data.get("source") if request.include_source else None,
+            ),
+            callers=[
+                EntityResult(
+                    qualified_name=c.get("qualified_name", ""),
+                    name=c.get("name", ""),
+                    entity_type=c.get("type", ""),
+                    file_path=c.get("file_path", ""),
+                )
+                for c in data.get("callers", [])
+            ],
+            callees=[
+                EntityResult(
+                    qualified_name=c.get("qualified_name", ""),
+                    name=c.get("name", ""),
+                    entity_type=c.get("type", ""),
+                    file_path=c.get("file_path", ""),
+                )
+                for c in data.get("callees", [])
+            ],
+            dependencies=[
+                EntityResult(
+                    qualified_name=d.get("qualified_name", ""),
+                    name=d.get("name", ""),
+                    entity_type=d.get("type", ""),
+                    file_path=d.get("file_path", ""),
+                )
+                for d in data.get("dependencies", [])
+            ],
+            dependents=[
+                EntityResult(
+                    qualified_name=d.get("qualified_name", ""),
+                    name=d.get("name", ""),
+                    entity_type=d.get("type", ""),
+                    file_path=d.get("file_path", ""),
+                )
+                for d in data.get("dependents", [])
+            ],
+        )
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/callers/{qualified_name:path}")
+async def get_callers(qualified_name: str, depth: int = 1):
+    """Get all callers of a function."""
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.get_callers(qualified_name=qualified_name, depth=depth)
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return {
+            "callers": [
+                EntityResult(
+                    qualified_name=c.get("qualified_name", ""),
+                    name=c.get("name", ""),
+                    entity_type=c.get("type", "Function"),
+                    file_path=c.get("file_path", ""),
+                )
+                for c in result.data.get("callers", [])
+            ]
+        }
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/callees/{qualified_name:path}")
+async def get_callees(qualified_name: str, depth: int = 1):
+    """Get all callees of a function."""
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.get_callees(qualified_name=qualified_name, depth=depth)
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return {
+            "callees": [
+                EntityResult(
+                    qualified_name=c.get("qualified_name", ""),
+                    name=c.get("name", ""),
+                    entity_type=c.get("type", "Function"),
+                    file_path=c.get("file_path", ""),
+                )
+                for c in result.data.get("callees", [])
+            ]
+        }
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/hierarchy/{class_name:path}")
+async def get_class_hierarchy(class_name: str, direction: str = "both"):
+    """Get class inheritance hierarchy.
+
+    Args:
+        class_name: Qualified name of class
+        direction: 'up' (parents), 'down' (children), or 'both'
+    """
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.get_class_hierarchy(
+            class_name=class_name,
+            direction=direction,
+        )
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return result.data
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/dependencies/{file_path:path}")
+async def get_file_dependencies(file_path: str, direction: str = "both"):
+    """Get file import dependencies.
+
+    Args:
+        file_path: Path to file
+        direction: 'imports', 'imported_by', or 'both'
+    """
+    try:
+        toolkit = _get_toolkit()
+        result = toolkit.get_file_dependencies(
+            file_path=file_path,
+            direction=direction,
+        )
+
+        if not result.success:
+            raise HTTPException(status_code=500, detail=result.error)
+
+        return result.data
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.get("/knowledge-silos")
+async def get_knowledge_silos(
+    threshold: float = 0.7,
+    max_authors: int = 2,
+    top: int = 20,
+):
+    """Detect knowledge silos - critical code with few maintainers.
+
+    Args:
+        threshold: Minimum importance score
+        max_authors: Maximum number of authors to be considered a silo
+        top: Number of silos to return
+    """
+    try:
+        from ..analytics.engine import AnalyticsEngine
+
+        engine = AnalyticsEngine()
+        silos = engine.detect_knowledge_silos(
+            importance_threshold=threshold,
+            max_authors=max_authors,
+            top=top,
+        )
+
+        return {
+            "silos": [
+                KnowledgeSilo(
+                    file_path=s.get("file_path", ""),
+                    importance_score=s.get("importance_score", 0.0),
+                    author_count=s.get("author_count", 0),
+                    authors=s.get("authors", []),
+                    function_count=s.get("function_count", 0),
+                )
+                for s in silos
+            ]
+        }
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
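To make the request and response shapes above concrete, here is an illustrative set of calls against these endpoints. The base URL mirrors the localhost:8765 address used in projectmd.py's curl example, the AgentRunner lookup is just an example query, and httpx is assumed to be installed; field names follow the Pydantic models defined in this file.

```python
# Hypothetical usage of the /query endpoints (not shipped with the package).
# Assumes the API is served under /api on localhost:8765.
import httpx

BASE = "http://localhost:8765/api/query"

# Look up a class by name, then expand the first hit to inspect relationships.
classes = httpx.get(f"{BASE}/find-class/AgentRunner").json()["results"]
if classes:
    expanded = httpx.post(
        f"{BASE}/expand",
        json={
            "entity_type": "Class",
            "qualified_name": classes[0]["qualified_name"],
            "include_source": False,
        },
    ).json()
    print(len(expanded["callers"]), "callers /", len(expanded["dependents"]), "dependents")

# Knowledge silos: high-importance files maintained by at most two authors.
silos = httpx.get(f"{BASE}/knowledge-silos", params={"max_authors": 2, "top": 5}).json()
for silo in silos["silos"]:
    print(f'{silo["file_path"]}: {", ".join(silo["authors"])}')
```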
emdash_core/api/research.py
ADDED
@@ -0,0 +1,122 @@
+"""Research endpoints with SSE streaming."""
+
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi.responses import StreamingResponse
+from pydantic import BaseModel, Field
+
+from ..sse.stream import SSEHandler, EventType
+
+router = APIRouter(prefix="/research", tags=["research"])
+
+_executor = ThreadPoolExecutor(max_workers=2)
+
+
+class ResearchRequest(BaseModel):
+    """Request for deep research."""
+    goal: str = Field(..., description="Research goal")
+    max_iterations: int = Field(default=5, description="Max research iterations")
+    budget: int = Field(default=50000, description="Token budget")
+    model: Optional[str] = Field(default=None, description="LLM model for main tasks")
+    researcher_model: Optional[str] = Field(default=None, description="LLM for research")
+
+
+class ResearchResponse(BaseModel):
+    """Research response."""
+    goal: str
+    findings: str
+    sources: list[str]
+    iterations: int
+
+
+def _run_research_sync(
+    goal: str,
+    max_iterations: int,
+    model: Optional[str],
+    researcher_model: Optional[str],
+    sse_handler: SSEHandler,
+):
+    """Run research synchronously."""
+    import sys
+    from pathlib import Path
+
+    repo_root = Path(__file__).parent.parent.parent.parent.parent
+    if str(repo_root) not in sys.path:
+        sys.path.insert(0, str(repo_root))
+
+    try:
+        from ..agent.research.agent import ResearchAgent
+        from ..agent.events import AgentEventEmitter
+
+        class SSEBridge:
+            def __init__(self, handler):
+                self._handler = handler
+
+            def handle(self, event):
+                self._handler.handle(event)
+
+        emitter = AgentEventEmitter(agent_name="Research")
+        emitter.add_handler(SSEBridge(sse_handler))
+
+        agent = ResearchAgent(
+            planner_model=model,
+            researcher_model=researcher_model or model,
+            critic_model=model,
+            synthesizer_model=model,
+            emitter=emitter,
+        )
+
+        result = agent.research(goal, max_iterations=max_iterations)
+
+        sse_handler.emit(EventType.RESPONSE, {
+            "goal": goal,
+            "findings": result.get("synthesis", ""),
+            "sources": result.get("sources", []),
+            "iterations": result.get("iterations", 0),
+        })
+
+    except Exception as e:
+        sse_handler.emit(EventType.ERROR, {"message": str(e)})
+    finally:
+        sse_handler.close()
+
+
+@router.post("")
+async def research(request: ResearchRequest):
+    """Deep research with multi-LLM loops and critic evaluation.
+
+    Uses multiple specialized agents:
+    - Planner: Creates research plan
+    - Researcher: Gathers information
+    - Critic: Evaluates findings
+    - Synthesizer: Produces final report
+    """
+    sse_handler = SSEHandler(agent_name="Research")
+
+    sse_handler.emit(EventType.SESSION_START, {
+        "agent_name": "Research",
+        "goal": request.goal,
+    })
+
+    async def run():
+        loop = asyncio.get_event_loop()
+        await loop.run_in_executor(
+            _executor,
+            _run_research_sync,
+            request.goal,
+            request.max_iterations,
+            request.model,
+            request.researcher_model,
+            sse_handler,
+        )
+
+    asyncio.create_task(run())
+
+    return StreamingResponse(
+        sse_handler,
+        media_type="text/event-stream",
+        headers={"Cache-Control": "no-cache", "Connection": "keep-alive"},
+    )
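As with the other streaming endpoints, a caller posts a ResearchRequest and reads progress events until the stream closes. Below is a minimal sketch under the same assumptions as the earlier examples (local server on port 8765, API mounted under /api, httpx available); the goal string is only an example.

```python
# Hypothetical call to POST /api/research (illustrative only, not part of the package).
import httpx

payload = {
    "goal": "How does the ingestion pipeline build the code graph?",  # example goal
    "max_iterations": 3,
}

with httpx.stream(
    "POST", "http://localhost:8765/api/research", json=payload, timeout=None
) as response:
    response.raise_for_status()
    for line in response.iter_lines():
        # Forward each SSE frame; the final "response" event carries the findings.
        if line.strip():
            print(line)
```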