emdash-core 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emdash_core/__init__.py +3 -0
- emdash_core/agent/__init__.py +37 -0
- emdash_core/agent/agents.py +225 -0
- emdash_core/agent/code_reviewer.py +476 -0
- emdash_core/agent/compaction.py +143 -0
- emdash_core/agent/context_manager.py +140 -0
- emdash_core/agent/events.py +338 -0
- emdash_core/agent/handlers.py +224 -0
- emdash_core/agent/inprocess_subagent.py +377 -0
- emdash_core/agent/mcp/__init__.py +50 -0
- emdash_core/agent/mcp/client.py +346 -0
- emdash_core/agent/mcp/config.py +302 -0
- emdash_core/agent/mcp/manager.py +496 -0
- emdash_core/agent/mcp/tool_factory.py +213 -0
- emdash_core/agent/prompts/__init__.py +38 -0
- emdash_core/agent/prompts/main_agent.py +104 -0
- emdash_core/agent/prompts/subagents.py +131 -0
- emdash_core/agent/prompts/workflow.py +136 -0
- emdash_core/agent/providers/__init__.py +34 -0
- emdash_core/agent/providers/base.py +143 -0
- emdash_core/agent/providers/factory.py +80 -0
- emdash_core/agent/providers/models.py +220 -0
- emdash_core/agent/providers/openai_provider.py +463 -0
- emdash_core/agent/providers/transformers_provider.py +217 -0
- emdash_core/agent/research/__init__.py +81 -0
- emdash_core/agent/research/agent.py +143 -0
- emdash_core/agent/research/controller.py +254 -0
- emdash_core/agent/research/critic.py +428 -0
- emdash_core/agent/research/macros.py +469 -0
- emdash_core/agent/research/planner.py +449 -0
- emdash_core/agent/research/researcher.py +436 -0
- emdash_core/agent/research/state.py +523 -0
- emdash_core/agent/research/synthesizer.py +594 -0
- emdash_core/agent/reviewer_profile.py +475 -0
- emdash_core/agent/rules.py +123 -0
- emdash_core/agent/runner.py +601 -0
- emdash_core/agent/session.py +262 -0
- emdash_core/agent/spec_schema.py +66 -0
- emdash_core/agent/specification.py +479 -0
- emdash_core/agent/subagent.py +397 -0
- emdash_core/agent/subagent_prompts.py +13 -0
- emdash_core/agent/toolkit.py +482 -0
- emdash_core/agent/toolkits/__init__.py +64 -0
- emdash_core/agent/toolkits/base.py +96 -0
- emdash_core/agent/toolkits/explore.py +47 -0
- emdash_core/agent/toolkits/plan.py +55 -0
- emdash_core/agent/tools/__init__.py +141 -0
- emdash_core/agent/tools/analytics.py +436 -0
- emdash_core/agent/tools/base.py +131 -0
- emdash_core/agent/tools/coding.py +484 -0
- emdash_core/agent/tools/github_mcp.py +592 -0
- emdash_core/agent/tools/history.py +13 -0
- emdash_core/agent/tools/modes.py +153 -0
- emdash_core/agent/tools/plan.py +206 -0
- emdash_core/agent/tools/plan_write.py +135 -0
- emdash_core/agent/tools/search.py +412 -0
- emdash_core/agent/tools/spec.py +341 -0
- emdash_core/agent/tools/task.py +262 -0
- emdash_core/agent/tools/task_output.py +204 -0
- emdash_core/agent/tools/tasks.py +454 -0
- emdash_core/agent/tools/traversal.py +588 -0
- emdash_core/agent/tools/web.py +179 -0
- emdash_core/analytics/__init__.py +5 -0
- emdash_core/analytics/engine.py +1286 -0
- emdash_core/api/__init__.py +5 -0
- emdash_core/api/agent.py +308 -0
- emdash_core/api/agents.py +154 -0
- emdash_core/api/analyze.py +264 -0
- emdash_core/api/auth.py +173 -0
- emdash_core/api/context.py +77 -0
- emdash_core/api/db.py +121 -0
- emdash_core/api/embed.py +131 -0
- emdash_core/api/feature.py +143 -0
- emdash_core/api/health.py +93 -0
- emdash_core/api/index.py +162 -0
- emdash_core/api/plan.py +110 -0
- emdash_core/api/projectmd.py +210 -0
- emdash_core/api/query.py +320 -0
- emdash_core/api/research.py +122 -0
- emdash_core/api/review.py +161 -0
- emdash_core/api/router.py +76 -0
- emdash_core/api/rules.py +116 -0
- emdash_core/api/search.py +119 -0
- emdash_core/api/spec.py +99 -0
- emdash_core/api/swarm.py +223 -0
- emdash_core/api/tasks.py +109 -0
- emdash_core/api/team.py +120 -0
- emdash_core/auth/__init__.py +17 -0
- emdash_core/auth/github.py +389 -0
- emdash_core/config.py +74 -0
- emdash_core/context/__init__.py +52 -0
- emdash_core/context/models.py +50 -0
- emdash_core/context/providers/__init__.py +11 -0
- emdash_core/context/providers/base.py +74 -0
- emdash_core/context/providers/explored_areas.py +183 -0
- emdash_core/context/providers/touched_areas.py +360 -0
- emdash_core/context/registry.py +73 -0
- emdash_core/context/reranker.py +199 -0
- emdash_core/context/service.py +260 -0
- emdash_core/context/session.py +352 -0
- emdash_core/core/__init__.py +104 -0
- emdash_core/core/config.py +454 -0
- emdash_core/core/exceptions.py +55 -0
- emdash_core/core/models.py +265 -0
- emdash_core/core/review_config.py +57 -0
- emdash_core/db/__init__.py +67 -0
- emdash_core/db/auth.py +134 -0
- emdash_core/db/models.py +91 -0
- emdash_core/db/provider.py +222 -0
- emdash_core/db/providers/__init__.py +5 -0
- emdash_core/db/providers/supabase.py +452 -0
- emdash_core/embeddings/__init__.py +24 -0
- emdash_core/embeddings/indexer.py +534 -0
- emdash_core/embeddings/models.py +192 -0
- emdash_core/embeddings/providers/__init__.py +7 -0
- emdash_core/embeddings/providers/base.py +112 -0
- emdash_core/embeddings/providers/fireworks.py +141 -0
- emdash_core/embeddings/providers/openai.py +104 -0
- emdash_core/embeddings/registry.py +146 -0
- emdash_core/embeddings/service.py +215 -0
- emdash_core/graph/__init__.py +26 -0
- emdash_core/graph/builder.py +134 -0
- emdash_core/graph/connection.py +692 -0
- emdash_core/graph/schema.py +416 -0
- emdash_core/graph/writer.py +667 -0
- emdash_core/ingestion/__init__.py +7 -0
- emdash_core/ingestion/change_detector.py +150 -0
- emdash_core/ingestion/git/__init__.py +5 -0
- emdash_core/ingestion/git/commit_analyzer.py +196 -0
- emdash_core/ingestion/github/__init__.py +6 -0
- emdash_core/ingestion/github/pr_fetcher.py +296 -0
- emdash_core/ingestion/github/task_extractor.py +100 -0
- emdash_core/ingestion/orchestrator.py +540 -0
- emdash_core/ingestion/parsers/__init__.py +10 -0
- emdash_core/ingestion/parsers/base_parser.py +66 -0
- emdash_core/ingestion/parsers/call_graph_builder.py +121 -0
- emdash_core/ingestion/parsers/class_extractor.py +154 -0
- emdash_core/ingestion/parsers/function_extractor.py +202 -0
- emdash_core/ingestion/parsers/import_analyzer.py +119 -0
- emdash_core/ingestion/parsers/python_parser.py +123 -0
- emdash_core/ingestion/parsers/registry.py +72 -0
- emdash_core/ingestion/parsers/ts_ast_parser.js +313 -0
- emdash_core/ingestion/parsers/typescript_parser.py +278 -0
- emdash_core/ingestion/repository.py +346 -0
- emdash_core/models/__init__.py +38 -0
- emdash_core/models/agent.py +68 -0
- emdash_core/models/index.py +77 -0
- emdash_core/models/query.py +113 -0
- emdash_core/planning/__init__.py +7 -0
- emdash_core/planning/agent_api.py +413 -0
- emdash_core/planning/context_builder.py +265 -0
- emdash_core/planning/feature_context.py +232 -0
- emdash_core/planning/feature_expander.py +646 -0
- emdash_core/planning/llm_explainer.py +198 -0
- emdash_core/planning/similarity.py +509 -0
- emdash_core/planning/team_focus.py +821 -0
- emdash_core/server.py +153 -0
- emdash_core/sse/__init__.py +5 -0
- emdash_core/sse/stream.py +196 -0
- emdash_core/swarm/__init__.py +17 -0
- emdash_core/swarm/merge_agent.py +383 -0
- emdash_core/swarm/session_manager.py +274 -0
- emdash_core/swarm/swarm_runner.py +226 -0
- emdash_core/swarm/task_definition.py +137 -0
- emdash_core/swarm/worker_spawner.py +319 -0
- emdash_core/swarm/worktree_manager.py +278 -0
- emdash_core/templates/__init__.py +10 -0
- emdash_core/templates/defaults/agent-builder.md.template +82 -0
- emdash_core/templates/defaults/focus.md.template +115 -0
- emdash_core/templates/defaults/pr-review-enhanced.md.template +309 -0
- emdash_core/templates/defaults/pr-review.md.template +80 -0
- emdash_core/templates/defaults/project.md.template +85 -0
- emdash_core/templates/defaults/research_critic.md.template +112 -0
- emdash_core/templates/defaults/research_planner.md.template +85 -0
- emdash_core/templates/defaults/research_synthesizer.md.template +128 -0
- emdash_core/templates/defaults/reviewer.md.template +81 -0
- emdash_core/templates/defaults/spec.md.template +41 -0
- emdash_core/templates/defaults/tasks.md.template +78 -0
- emdash_core/templates/loader.py +296 -0
- emdash_core/utils/__init__.py +45 -0
- emdash_core/utils/git.py +84 -0
- emdash_core/utils/image.py +502 -0
- emdash_core/utils/logger.py +51 -0
- emdash_core-0.1.7.dist-info/METADATA +35 -0
- emdash_core-0.1.7.dist-info/RECORD +187 -0
- emdash_core-0.1.7.dist-info/WHEEL +4 -0
- emdash_core-0.1.7.dist-info/entry_points.txt +3 -0
emdash_core/api/embed.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"""Embedding endpoints."""
|
|
2
|
+
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter, HTTPException
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
router = APIRouter(prefix="/embed", tags=["embeddings"])
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class EmbedIndexRequest(BaseModel):
    """Request to index embeddings for a repository's graph entities."""

    # Field descriptions surface in the OpenAPI schema.
    repo_path: str = Field(..., description="Path to repository")
    include_prs: bool = Field(default=True, description="Index PR embeddings")
    include_functions: bool = Field(default=True, description="Index function embeddings")
    include_classes: bool = Field(default=True, description="Index class embeddings")
    reindex: bool = Field(default=False, description="Reindex all embeddings")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class EmbedStatus(BaseModel):
    """Embedding coverage statistics for the indexed graph."""

    # Counts mirror the keys returned by EmbeddingService.get_coverage_stats().
    total_entities: int
    embedded_entities: int
    # embedded / total * 100 (0 when nothing is indexed)
    coverage_percent: float
    pr_count: int
    function_count: int
    class_count: int
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class EmbedModel(BaseModel):
    """Embedding model info."""

    # Model identifier (e.g. a sentence-transformers model name).
    name: str
    # Dimensionality of the produced embedding vectors.
    dimension: int
    # Human-readable summary of the model's trade-offs.
    description: str
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@router.get("/status", response_model=EmbedStatus)
async def get_embed_status():
    """Report embedding coverage statistics for the indexed graph.

    Reads the coverage stats from the embedding service and converts
    them into an EmbedStatus; any failure surfaces as a 500 error.
    """
    try:
        from ..embeddings.service import EmbeddingService

        coverage = EmbeddingService().get_coverage_stats()

        total_count = coverage.get("total", 1)
        embedded_count = coverage.get("embedded", 0)
        percent = (embedded_count / total_count * 100) if total_count > 0 else 0

        return EmbedStatus(
            total_entities=total_count,
            embedded_entities=embedded_count,
            coverage_percent=percent,
            pr_count=coverage.get("pr_count", 0),
            function_count=coverage.get("function_count", 0),
            class_count=coverage.get("class_count", 0),
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@router.post("/index")
async def index_embeddings(request: EmbedIndexRequest):
    """Generate embeddings for graph entities.

    Points the graph database at the requested repository, then runs
    the indexing passes selected in the request (PRs, functions,
    classes) and reports the total number of entities indexed.
    """
    from pathlib import Path
    from ..graph.connection import configure_for_repo
    from ..embeddings.indexer import EmbeddingIndexer

    try:
        # Bind the graph connection to the target repository first.
        configure_for_repo(Path(request.repo_path).resolve())

        indexer = EmbeddingIndexer()
        count = 0

        if request.include_prs:
            count += indexer.index_pull_requests()
        if request.include_functions:
            count += indexer.index_functions(reindex=request.reindex)
        if request.include_classes:
            count += indexer.index_classes(reindex=request.reindex)

        return {"success": True, "indexed": count}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
@router.get("/models")
async def list_models():
    """List available embedding models."""
    # (name, dimension, description) triples for the supported models.
    catalog = [
        ("all-MiniLM-L6-v2", 384, "Fast, general purpose (default)"),
        ("all-mpnet-base-v2", 768, "Higher quality, slower"),
        ("paraphrase-multilingual-MiniLM-L12-v2", 384, "Multilingual support"),
    ]
    return {
        "models": [
            EmbedModel(name=name, dimension=dim, description=desc)
            for name, dim, desc in catalog
        ]
    }
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
@router.post("/test")
async def test_embedding(text: str, model: Optional[str] = None):
    """Test embedding generation with sample text.

    Embeds *text* with the requested model (or the service default) and
    returns the vector dimension plus a small preview.
    """
    try:
        from ..embeddings.service import EmbeddingService

        vector = EmbeddingService(model_name=model).embed(text)

        # Truncate long inputs in the echoed-back text.
        preview_text = text if len(text) <= 100 else text[:100] + "..."
        return {
            "text": preview_text,
            "dimension": len(vector),
            "model": model or "default",
            "preview": vector[:5],  # first 5 dimensions only
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
"""Feature analysis endpoints."""
|
|
2
|
+
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter, HTTPException
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
router = APIRouter(prefix="/feature", tags=["feature"])
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class FeatureContextRequest(BaseModel):
    """Request for feature context."""

    query: str = Field(..., description="Feature query")
    # Number of graph-expansion hops; 0 disables expansion entirely.
    hops: int = Field(default=2, description="Graph traversal hops")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class FeatureEntity(BaseModel):
    """An entity related to a feature."""

    # Fully-qualified symbol name within the indexed graph.
    qualified_name: str
    # Short (unqualified) name.
    name: str
    # Graph entity type, e.g. "Function" or "Class".
    entity_type: str
    # Source file the entity lives in.
    file_path: str
    # How this entity relates to the query: "root" for direct search hits,
    # otherwise "callers" / "callees" / "dependencies" from graph expansion.
    relationship: str
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class FeatureContextResponse(BaseModel):
    """Feature context response."""

    # The original feature query, echoed back.
    query: str
    # Direct search matches.
    root_entities: list[FeatureEntity]
    # Entities reached by graph expansion (capped by the endpoint).
    related_entities: list[FeatureEntity]
    # De-duplicated file paths covering both entity lists.
    files: list[str]
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class FeatureExplainRequest(BaseModel):
    """Request for LLM feature explanation."""

    query: str = Field(..., description="Feature query")
    hops: int = Field(default=2, description="Graph traversal hops")
    style: str = Field(default="technical", description="Style: technical, simple, detailed")
    # None selects the explainer's default model.
    model: Optional[str] = Field(default=None, description="LLM model")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@router.post("/context", response_model=FeatureContextResponse)
async def get_feature_context(request: FeatureContextRequest):
    """Find a feature and expand its AST graph.

    Searches for code matching the query; when ``hops`` > 0, each match
    is expanded to collect its callers, callees and dependencies.

    Raises:
        HTTPException: 500 when the search fails or on unexpected errors.
    """
    try:
        from ..agent.toolkit import AgentToolkit

        toolkit = AgentToolkit()

        # Search for matching entities.
        search_result = toolkit.search(query=request.query, limit=5)
        if not search_result.success:
            raise HTTPException(status_code=500, detail=search_result.error)

        root_entities: list[FeatureEntity] = []
        related_entities: list[FeatureEntity] = []
        files: set[str] = set()

        for r in search_result.data.get("results", []):
            root_entities.append(FeatureEntity(
                qualified_name=r.get("qualified_name", ""),
                name=r.get("name", ""),
                entity_type=r.get("type", ""),
                file_path=r.get("file_path", ""),
                relationship="root",
            ))
            files.add(r.get("file_path", ""))

            # Expand each entity to pull in related graph nodes.
            if request.hops > 0:
                expand_result = toolkit.expand(
                    entity_type=r.get("type", "Function"),
                    qualified_name=r.get("qualified_name", ""),
                )
                if expand_result.success:
                    for rel_type in ("callers", "callees", "dependencies"):
                        for related in expand_result.data.get(rel_type, []):
                            related_entities.append(FeatureEntity(
                                qualified_name=related.get("qualified_name", ""),
                                name=related.get("name", ""),
                                entity_type=related.get("type", ""),
                                file_path=related.get("file_path", ""),
                                relationship=rel_type,
                            ))
                            files.add(related.get("file_path", ""))

        return FeatureContextResponse(
            query=request.query,
            root_entities=root_entities,
            related_entities=related_entities[:20],  # cap payload size
            files=list(files)[:20],
        )
    except HTTPException:
        # Re-raise our own HTTP errors untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@router.post("/explain")
async def explain_feature(request: FeatureExplainRequest):
    """Explain a feature using LLM based on AST graph.

    Gathers graph context via :func:`get_feature_context`, then asks the
    explainer to summarize the feature in the requested style.

    Raises:
        HTTPException: 500 on any unexpected failure.
    """
    try:
        from ..planning.llm_explainer import LLMExplainer

        explainer = LLMExplainer(model=request.model)

        # Get graph context first; reuses the /context endpoint logic.
        context = await get_feature_context(
            FeatureContextRequest(query=request.query, hops=request.hops)
        )

        # Generate the explanation from the collected context.
        explanation = explainer.explain(
            query=request.query,
            context=context.model_dump(),
            style=request.style,
        )

        return {
            "query": request.query,
            "explanation": explanation,
            "entities_analyzed": len(context.root_entities) + len(context.related_entities),
            "files_analyzed": len(context.files),
        }
    except HTTPException:
        # Propagate errors raised by get_feature_context unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"""Health check endpoint."""
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter
|
|
8
|
+
from pydantic import BaseModel
|
|
9
|
+
|
|
10
|
+
from ..config import get_config
|
|
11
|
+
|
|
12
|
+
router = APIRouter(prefix="/health", tags=["health"])
|
|
13
|
+
|
|
14
|
+
# Server start time for uptime calculation
|
|
15
|
+
_start_time: float = time.time()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class DatabaseStatus(BaseModel):
    """Database connection status."""

    connected: bool
    # Counts are optional because they are unknown when disconnected.
    node_count: Optional[int] = None
    relationship_count: Optional[int] = None
    # Populated only when the check failed.
    error: Optional[str] = None
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class HealthResponse(BaseModel):
    """Health check response."""

    # "healthy" when the database is connected, otherwise "starting";
    # "unhealthy" is reserved for future use.
    status: str
    version: str
    # Seconds since the module was imported (server start).
    uptime_seconds: float
    repo_root: Optional[str]
    database: DatabaseStatus
    timestamp: datetime
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _check_database() -> DatabaseStatus:
    """Check database connection status.

    NOTE(review): placeholder implementation — only verifies that the
    database file exists on disk; real node/relationship counts are TODO
    until the database services are moved here.
    """
    config = get_config()

    try:
        db_path = config.database_full_path
        if not db_path.exists():
            return DatabaseStatus(
                connected=False,
                error="Database not initialized",
            )
        # TODO: Get actual counts
        return DatabaseStatus(
            connected=True,
            node_count=0,
            relationship_count=0,
        )
    except Exception as e:
        return DatabaseStatus(connected=False, error=str(e))
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@router.get("", response_model=HealthResponse)
async def health_check() -> HealthResponse:
    """Check server health status.

    Reports version, uptime, configured repo root and database status;
    status is "healthy" only once the database is connected.
    """
    from .. import __version__

    config = get_config()
    database = _check_database()

    return HealthResponse(
        status="healthy" if database.connected else "starting",
        version=__version__,
        uptime_seconds=time.time() - _start_time,
        repo_root=config.repo_root,
        database=database,
        timestamp=datetime.now(),
    )
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@router.get("/ready")
async def readiness_check() -> dict:
    """Simple readiness probe for container orchestration.

    Always reports ready; deeper checks live in the main health endpoint.
    """
    return {"ready": True}
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@router.get("/live")
async def liveness_check() -> dict:
    """Simple liveness probe for container orchestration.

    Always reports alive — reaching this handler proves the process
    and event loop are responsive.
    """
    return {"alive": True}
|
emdash_core/api/index.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
"""Indexing endpoints with SSE streaming."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter
|
|
8
|
+
from fastapi.responses import StreamingResponse
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
from ..sse.stream import SSEHandler, EventType
|
|
12
|
+
|
|
13
|
+
router = APIRouter(prefix="/index", tags=["indexing"])
|
|
14
|
+
|
|
15
|
+
_executor = ThreadPoolExecutor(max_workers=2)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class IndexOptions(BaseModel):
    """Options for indexing operation."""

    changed_only: bool = Field(default=False, description="Only index changed files")
    index_git: bool = Field(default=False, description="Index git history")
    # 0 disables GitHub PR indexing.
    index_github: int = Field(default=0, description="Number of GitHub PRs to index")
    detect_communities: bool = Field(default=True, description="Run community detection")
    describe_communities: bool = Field(default=False, description="Use LLM to describe communities")
    community_limit: int = Field(default=20, description="Max communities to describe")
    # None uses the default model.
    model: Optional[str] = Field(default=None, description="Model for descriptions")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class IndexRequest(BaseModel):
    """Request to start indexing."""

    repo_path: str = Field(..., description="Path to repository")
    # Defaults to an IndexOptions with all defaults when omitted.
    options: IndexOptions = Field(default_factory=IndexOptions)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class IndexStatus(BaseModel):
    """Current indexing status."""

    # True when the graph contains at least one node.
    is_indexed: bool
    # Not tracked yet — always None for now.
    last_indexed: Optional[str]
    last_commit: Optional[str]
    file_count: int
    function_count: int
    class_count: int
    community_count: int
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _run_index_sync(
    repo_path: str,
    options: IndexOptions,
    sse_handler: SSEHandler,
):
    """Run indexing synchronously in thread pool.

    Emits PROGRESS events on *sse_handler* as the pipeline advances,
    followed by a final RESPONSE event on success or an ERROR event on
    failure. Always closes the handler so the SSE stream terminates.
    """
    from pathlib import Path

    try:
        from ..graph.connection import configure_for_repo
        from ..ingestion.orchestrator import IngestionOrchestrator

        sse_handler.emit(EventType.PROGRESS, {"step": "Starting indexing", "percent": 0})

        # Configure database for target repo
        repo_root = Path(repo_path).resolve()
        configure_for_repo(repo_root)

        # Create orchestrator (uses configured connection)
        orchestrator = IngestionOrchestrator()

        sse_handler.emit(EventType.PROGRESS, {"step": "Parsing codebase", "percent": 10})

        # Progress callback to emit SSE events during parsing
        def progress_callback(step: str, percent: float):
            sse_handler.emit(EventType.PROGRESS, {"step": step, "percent": percent})

        # Run indexing with progress callback
        result = orchestrator.index(
            repo_path=repo_path,
            changed_only=options.changed_only,
            skip_git=not options.index_git,
            pr_limit=options.index_github,
            progress_callback=progress_callback,
        )

        sse_handler.emit(EventType.PROGRESS, {"step": "Building graph", "percent": 75})

        if options.detect_communities:
            # NOTE(review): only emits a progress event here — community
            # detection itself presumably happens inside orchestrator.index;
            # confirm before relying on this flag.
            sse_handler.emit(EventType.PROGRESS, {"step": "Detecting communities", "percent": 85})

        sse_handler.emit(EventType.PROGRESS, {"step": "Indexing complete", "percent": 100})

        sse_handler.emit(EventType.RESPONSE, {
            "success": True,
            "stats": result if isinstance(result, dict) else {},
        })

    except Exception as e:
        sse_handler.emit(EventType.ERROR, {
            "message": str(e),
        })
    finally:
        # Always close so the StreamingResponse consumer terminates.
        sse_handler.close()
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
# Strong references to in-flight background tasks; without these,
# asyncio.create_task results can be garbage-collected mid-run.
_background_tasks: set = set()


@router.post("/start")
async def index_start(request: IndexRequest):
    """Start indexing a repository with SSE streaming progress.

    Kicks off the (blocking) indexing pipeline in the module thread
    pool and returns a Server-Sent Events stream with progress updates.
    """
    sse_handler = SSEHandler(agent_name="Indexer")

    async def run_indexing():
        # get_running_loop() is the supported call inside a coroutine;
        # get_event_loop() is deprecated here since Python 3.10.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(
            _executor,
            _run_index_sync,
            request.repo_path,
            request.options,
            sse_handler,
        )

    # Keep a reference so the task isn't garbage-collected before it finishes.
    task = asyncio.create_task(run_indexing())
    _background_tasks.add(task)
    task.add_done_callback(_background_tasks.discard)

    return StreamingResponse(
        sse_handler,
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        },
    )
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
@router.get("/status", response_model=IndexStatus)
async def index_status(repo_path: str):
    """Get current indexing status for a repository.

    Any failure (e.g. no database yet) is reported as an un-indexed
    repository rather than an error.
    """
    try:
        from pathlib import Path
        from ..graph.connection import configure_for_repo

        conn = configure_for_repo(Path(repo_path).resolve())
        info = conn.get_database_info()

        counts = {
            "file_count": info.get("file_count", 0),
            "function_count": info.get("function_count", 0),
            "class_count": info.get("class_count", 0),
            "community_count": info.get("community_count", 0),
        }
        return IndexStatus(
            is_indexed=info.get("node_count", 0) > 0,
            last_indexed=None,  # TODO: Track this
            last_commit=None,
            **counts,
        )
    except Exception:
        # Fall back to an empty/un-indexed status on any failure.
        return IndexStatus(
            is_indexed=False,
            last_indexed=None,
            last_commit=None,
            file_count=0,
            function_count=0,
            class_count=0,
            community_count=0,
        )
|
emdash_core/api/plan.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
"""Planning endpoints."""
|
|
2
|
+
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter, HTTPException
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
router = APIRouter(prefix="/plan", tags=["planning"])
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class PlanContextRequest(BaseModel):
    """Request for planning context."""

    repo_path: str = Field(..., description="Path to repository")
    description: str = Field(..., description="Feature description")
    # NOTE(review): not yet honored — similar-PR search is a TODO in the handler.
    similar_prs: int = Field(default=5, description="Number of similar PRs to find")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class SimilarPR(BaseModel):
    """A similar PR."""

    # PR number in the source repository.
    number: int
    title: str
    # Similarity score (higher is more similar).
    score: float
    # Files touched by the PR.
    files: list[str] = Field(default_factory=list)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class PlanContextResponse(BaseModel):
    """Planning context response."""

    # The feature description, echoed back.
    description: str
    similar_prs: list[SimilarPR]
    relevant_files: list[str]
    relevant_functions: list[str]
    # Not populated yet by the current handler.
    suggested_approach: Optional[str] = None
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _get_toolkit():
    """Return a fresh AgentToolkit (imported lazily at call time)."""
    from ..agent.toolkit import AgentToolkit
    return AgentToolkit()
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@router.post("/context", response_model=PlanContextResponse)
async def get_plan_context(request: PlanContextRequest):
    """Get planning context for a feature.

    Returns similar PRs, relevant files, and suggested approach.
    A failed code search silently yields empty file/function lists.
    """
    from pathlib import Path
    from ..graph.connection import configure_for_repo

    try:
        # Bind the graph database to the requested repository.
        configure_for_repo(Path(request.repo_path).resolve())

        toolkit = _get_toolkit()

        # Search the graph for code relevant to the description.
        search_result = toolkit.search(query=request.description, limit=10)

        relevant_files: list[str] = []
        relevant_functions: list[str] = []
        if search_result.success:
            for hit in search_result.data.get("results", []):
                if hit.get("type") == "File":
                    relevant_files.append(hit.get("file_path", ""))
                else:
                    relevant_functions.append(hit.get("qualified_name", ""))

        # TODO: Find similar PRs using embedding search
        return PlanContextResponse(
            description=request.description,
            similar_prs=[],
            relevant_files=relevant_files[:10],
            relevant_functions=relevant_functions[:10],
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@router.get("/similar")
async def find_similar_prs(repo_path: str, description: str, limit: int = 5):
    """Find PRs similar to a feature description.

    Runs an embedding similarity search over indexed pull requests and
    returns up to *limit* matches.
    """
    from pathlib import Path
    from ..graph.connection import configure_for_repo

    try:
        # Bind the graph database to the requested repository.
        configure_for_repo(Path(repo_path).resolve())

        from ..planning.similarity import SimilaritySearch

        matches = SimilaritySearch().find_similar_prs(description, limit=limit)

        similar = [
            SimilarPR(
                number=pr.get("number", 0),
                title=pr.get("title", ""),
                score=pr.get("score", 0.0),
                files=pr.get("files", []),
            )
            for pr in matches
        ]
        return {"similar_prs": similar}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|