sari 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/__init__.py +1 -0
- app/config.py +240 -0
- app/db.py +932 -0
- app/dedup_queue.py +77 -0
- app/engine_registry.py +56 -0
- app/engine_runtime.py +472 -0
- app/http_server.py +204 -0
- app/indexer.py +1532 -0
- app/main.py +147 -0
- app/models.py +39 -0
- app/queue_pipeline.py +65 -0
- app/ranking.py +144 -0
- app/registry.py +172 -0
- app/search_engine.py +572 -0
- app/watcher.py +124 -0
- app/workspace.py +286 -0
- deckard/__init__.py +3 -0
- deckard/__main__.py +4 -0
- deckard/main.py +345 -0
- deckard/version.py +1 -0
- mcp/__init__.py +1 -0
- mcp/__main__.py +19 -0
- mcp/cli.py +485 -0
- mcp/daemon.py +149 -0
- mcp/proxy.py +304 -0
- mcp/registry.py +218 -0
- mcp/server.py +519 -0
- mcp/session.py +234 -0
- mcp/telemetry.py +112 -0
- mcp/test_cli.py +89 -0
- mcp/test_daemon.py +124 -0
- mcp/test_server.py +197 -0
- mcp/tools/__init__.py +14 -0
- mcp/tools/_util.py +244 -0
- mcp/tools/deckard_guide.py +32 -0
- mcp/tools/doctor.py +208 -0
- mcp/tools/get_callers.py +60 -0
- mcp/tools/get_implementations.py +60 -0
- mcp/tools/index_file.py +75 -0
- mcp/tools/list_files.py +138 -0
- mcp/tools/read_file.py +48 -0
- mcp/tools/read_symbol.py +99 -0
- mcp/tools/registry.py +212 -0
- mcp/tools/repo_candidates.py +89 -0
- mcp/tools/rescan.py +46 -0
- mcp/tools/scan_once.py +54 -0
- mcp/tools/search.py +208 -0
- mcp/tools/search_api_endpoints.py +72 -0
- mcp/tools/search_symbols.py +63 -0
- mcp/tools/status.py +135 -0
- sari/__init__.py +1 -0
- sari/__main__.py +4 -0
- sari-0.0.1.dist-info/METADATA +521 -0
- sari-0.0.1.dist-info/RECORD +58 -0
- sari-0.0.1.dist-info/WHEEL +5 -0
- sari-0.0.1.dist-info/entry_points.txt +2 -0
- sari-0.0.1.dist-info/licenses/LICENSE +21 -0
- sari-0.0.1.dist-info/top_level.txt +4 -0
mcp/test_server.py
ADDED
@@ -0,0 +1,197 @@

#!/usr/bin/env python3
"""
Unit tests for Local Search MCP Server

Usage:
    python3 -m pytest .codex/tools/sari/mcp/test_server.py -v
    # or
    python3 .codex/tools/sari/mcp/test_server.py
"""
import json
import os
import sys
import tempfile
from pathlib import Path
from unittest.mock import MagicMock, patch

# Add paths for imports
SCRIPT_DIR = Path(__file__).parent
sys.path.insert(0, str(SCRIPT_DIR))
sys.path.insert(0, str(SCRIPT_DIR.parent / "app"))

from server import LocalSearchMCPServer


def test_initialize():
    """Test MCP initialize response."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        result = server.handle_initialize({})

        assert result["protocolVersion"] == "2025-11-25"
        assert result["serverInfo"]["name"] == "sari"
        assert result["serverInfo"]["version"]
        assert "tools" in result["capabilities"]


def test_tools_list():
    """Test tools/list response."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        result = server.handle_tools_list({})

        tools = result["tools"]
        tool_names = [t["name"] for t in tools]

        assert "search" in tool_names
        assert "status" in tool_names
        assert "repo_candidates" in tool_names

        # Check search tool schema
        search_tool = next(t for t in tools if t["name"] == "search")
        assert "query" in search_tool["inputSchema"]["properties"]
        assert "query" in search_tool["inputSchema"]["required"]


def test_handle_request_initialize():
    """Test full request handling for initialize."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        request = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": "initialize",
            "params": {},
        }

        response = server.handle_request(request)

        assert response["jsonrpc"] == "2.0"
        assert response["id"] == 1
        assert "result" in response
        assert response["result"]["protocolVersion"] == "2025-11-25"


def test_handle_request_tools_list():
    """Test full request handling for tools/list."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        request = {
            "jsonrpc": "2.0",
            "id": 2,
            "method": "tools/list",
            "params": {},
        }

        response = server.handle_request(request)

        assert response["jsonrpc"] == "2.0"
        assert response["id"] == 2
        assert "result" in response
        assert "tools" in response["result"]


def test_handle_request_unknown_method():
    """Test error handling for unknown method."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        request = {
            "jsonrpc": "2.0",
            "id": 3,
            "method": "unknown/method",
            "params": {},
        }

        response = server.handle_request(request)

        assert response["jsonrpc"] == "2.0"
        assert response["id"] == 3
        assert "error" in response
        assert response["error"]["code"] == -32601


def test_handle_notification_no_response():
    """Test that notifications (no id) don't return a response."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)

        # Notification has no "id" field
        request = {
            "jsonrpc": "2.0",
            "method": "initialized",
            "params": {},
        }

        response = server.handle_request(request)

        assert response is None


def test_tool_status():
    """Test status tool execution."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)
        server._ensure_initialized()

        result = server._tool_status({})

        assert "content" in result
        assert len(result["content"]) > 0
        assert result["content"][0]["type"] == "text"

        status = json.loads(result["content"][0]["text"])
        assert "index_ready" in status
        assert "workspace_root" in status


def test_tool_search_empty_query():
    """Test search tool with empty query returns error."""
    with tempfile.TemporaryDirectory() as tmpdir:
        server = LocalSearchMCPServer(tmpdir)
        server._ensure_initialized()

        result = server._tool_search({"query": ""})

        assert result.get("isError") is True


def run_tests():
    """Run all tests without pytest."""
    import traceback

    tests = [
        test_initialize,
        test_tools_list,
        test_handle_request_initialize,
        test_handle_request_tools_list,
        test_handle_request_unknown_method,
        test_handle_notification_no_response,
        test_tool_status,
        test_tool_search_empty_query,
    ]

    passed = 0
    failed = 0

    for test in tests:
        try:
            test()
            print(f"✓ {test.__name__}")
            passed += 1
        except Exception as e:
            print(f"✗ {test.__name__}")
            traceback.print_exc()
            failed += 1

    print(f"\n{passed} passed, {failed} failed")
    return failed == 0


if __name__ == "__main__":
    success = run_tests()
    sys.exit(0 if success else 1)
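
Note: the `-32601` code asserted above is the standard JSON-RPC 2.0 "Method not found" error. A minimal sketch of the exchange the unknown-method test exercises (the error `message` string is illustrative; the tests only assert the code):

```python
# Illustrative JSON-RPC 2.0 request/response pair for an unknown method.
request = {"jsonrpc": "2.0", "id": 3, "method": "unknown/method", "params": {}}
response = {
    "jsonrpc": "2.0",
    "id": 3,
    "error": {"code": -32601, "message": "Method not found"},
}
```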
mcp/tools/__init__.py
ADDED
@@ -0,0 +1,14 @@

"""
MCP Tools for Local Search.
"""
from .search import execute_search
from .status import execute_status
from .repo_candidates import execute_repo_candidates
from .list_files import execute_list_files

__all__ = [
    "execute_search",
    "execute_status",
    "execute_repo_candidates",
    "execute_list_files",
]
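
Note: the package re-exports the tool entry points, so callers need not know the module layout. A minimal usage sketch, assuming a running index and that each `execute_*` handler takes a plain arguments dict (the `query` argument is confirmed by the search tool schema in the tests):

```python
from mcp.tools import execute_search, execute_status

status = execute_status({})                      # {"content": [{"type": "text", ...}]}
hits = execute_search({"query": "LocalSearchDB"})  # same MCP-style response shape
```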
mcp/tools/_util.py
ADDED
@@ -0,0 +1,244 @@

import json
import os
import urllib.parse
from enum import Enum
from typing import Any, Dict, Optional, List, Callable
from pathlib import Path

try:
    from app.workspace import WorkspaceManager
except Exception:
    WorkspaceManager = None

# --- Constants & Enums ---

class ErrorCode(str, Enum):
    INVALID_ARGS = "INVALID_ARGS"
    NOT_INDEXED = "NOT_INDEXED"
    REPO_NOT_FOUND = "REPO_NOT_FOUND"
    IO_ERROR = "IO_ERROR"
    DB_ERROR = "DB_ERROR"
    INTERNAL = "INTERNAL"
    ERR_INDEXER_FOLLOWER = "ERR_INDEXER_FOLLOWER"
    ERR_INDEXER_DISABLED = "ERR_INDEXER_DISABLED"
    ERR_ROOT_OUT_OF_SCOPE = "ERR_ROOT_OUT_OF_SCOPE"
    ERR_MCP_HTTP_UNSUPPORTED = "ERR_MCP_HTTP_UNSUPPORTED"
    ERR_ENGINE_NOT_INSTALLED = "ERR_ENGINE_NOT_INSTALLED"
    ERR_ENGINE_INIT = "ERR_ENGINE_INIT"
    ERR_ENGINE_QUERY = "ERR_ENGINE_QUERY"
    ERR_ENGINE_INDEX = "ERR_ENGINE_INDEX"
    ERR_ENGINE_UNAVAILABLE = "ERR_ENGINE_UNAVAILABLE"
    ERR_ENGINE_REBUILD = "ERR_ENGINE_REBUILD"

# --- Format Selection ---

def _get_format() -> str:
    """
    Returns 'pack' or 'json'.
    Default is 'pack'.
    'json' is used if DECKARD_FORMAT=json.
    """
    fmt = os.environ.get("DECKARD_FORMAT", "pack").strip().lower()
    return "json" if fmt == "json" else "pack"

def _compact_enabled() -> bool:
    """Legacy compact check for JSON mode."""
    val = (os.environ.get("DECKARD_RESPONSE_COMPACT") or "1").strip().lower()
    return val not in {"0", "false", "no", "off"}

# --- PACK1 Encoders ---

def pack_encode_text(s: Any) -> str:
    """
    ENC_TEXT: safe=""
    Used for snippet, msg, reason, detail, hint.
    """
    return urllib.parse.quote(str(s), safe="")

def pack_encode_id(s: Any) -> str:
    """
    ENC_ID: safe="/._-:@"
    Used for path, repo, name (identifiers).
    """
    return urllib.parse.quote(str(s), safe="/._-:@")

# --- PACK1 Builders ---

def pack_header(tool: str, kv: Dict[str, Any], returned: Optional[int] = None,
                total: Optional[int] = None, total_mode: Optional[str] = None) -> str:
    """
    Builds the PACK1 header line.
    PACK1 tool=<tool> ok=true k=v ... [returned=<N>] [total=<M>] [total_mode=<mode>]
    """
    parts = ["PACK1", f"tool={tool}", "ok=true"]

    # Add custom KV pairs
    for k, v in kv.items():
        parts.append(f"{k}={v}")

    if returned is not None:
        parts.append(f"returned={returned}")

    if total_mode:
        parts.append(f"total_mode={total_mode}")

    if total is not None and total_mode != "none":
        parts.append(f"total={total}")

    return " ".join(parts)

def pack_line(kind: str, kv: Optional[Dict[str, str]] = None, single_value: Optional[str] = None) -> str:
    """
    Builds a PACK1 record line.
    If single_value is provided: <kind>:<single_value>
    If kv is provided: <kind>:k=v k2=v2 ...
    """
    if single_value is not None:
        return f"{kind}:{single_value}"

    if kv:
        field_strs = [f"{k}={v}" for k, v in kv.items()]
        return f"{kind}:{' '.join(field_strs)}"

    return f"{kind}:"

def pack_error(tool: str, code: Any, msg: str, hints: List[str] = None, trace: str = None, fields: Dict[str, Any] = None) -> str:
    """
    Generates PACK1 error response.
    PACK1 tool=<tool> ok=false code=<CODE> msg=<ENCODED_MSG> [hint=<ENC>] [trace=<ENC>]
    """
    parts = [
        "PACK1",
        f"tool={tool}",
        "ok=false",
        f"code={code.value if isinstance(code, ErrorCode) else str(code)}",
        f"msg={pack_encode_text(msg)}",
    ]
    if hints:
        joined = " | ".join(hints)
        parts.append(f"hint={pack_encode_text(joined)}")
    if trace:
        parts.append(f"trace={pack_encode_text(trace)}")
    if fields:
        for k, v in fields.items():
            parts.append(f"{k}={pack_encode_text(v)}")
    return " ".join(parts)

def pack_truncated(next_offset: int, limit: int, truncated_state: str) -> str:
    """
    m:truncated=true|maybe next=use_offset offset=<nextOffset> limit=<limit>
    """
    return f"m:truncated={truncated_state} next=use_offset offset={next_offset} limit={limit}"

# --- Main Utility ---

def mcp_response(
    tool_name: str,
    pack_func: Callable[[], str],
    json_func: Callable[[], Dict[str, Any]]
) -> Dict[str, Any]:
    """
    Helper to dispatch between PACK1 and JSON based on configuration.

    pack_func: function that returns (str) - the full PACK1 text payload.
    json_func: function that returns (dict) - the dict for JSON serialization.
    """
    fmt = _get_format()

    try:
        if fmt == "pack":
            text_output = pack_func()
            return {
                "content": [{"type": "text", "text": text_output}]
            }
        else:
            # JSON mode (Legacy/Debug)
            data = json_func()

            if _compact_enabled():
                json_text = json.dumps(data, ensure_ascii=False, separators=(",", ":"))
            else:
                json_text = json.dumps(data, ensure_ascii=False, indent=2)

            res = {"content": [{"type": "text", "text": json_text}]}
            if isinstance(data, dict):
                res.update(data)
            return res
    except Exception as e:
        import traceback
        err_msg = str(e)
        stack = traceback.format_exc()

        if fmt == "pack":
            return {
                "content": [{"type": "text", "text": pack_error(tool_name, ErrorCode.INTERNAL, err_msg, trace=stack)}],
                "isError": True
            }
        else:
            err_obj = {
                "error": {"code": ErrorCode.INTERNAL.value, "message": err_msg, "trace": stack},
                "isError": True
            }
            return mcp_json(err_obj)


def mcp_json(obj):
    """Utility to format dictionary as standard MCP response."""
    if _compact_enabled():
        payload = json.dumps(obj, ensure_ascii=False, separators=(",", ":"))
    else:
        payload = json.dumps(obj, ensure_ascii=False, indent=2)
    res = {"content": [{"type": "text", "text": payload}]}
    if isinstance(obj, dict):
        res.update(obj)
    return res


def resolve_root_ids(roots: List[str]) -> List[str]:
    if not roots or not WorkspaceManager:
        return []
    out: List[str] = []
    for r in roots:
        try:
            out.append(WorkspaceManager.root_id(r))
        except Exception:
            continue
    return out


def resolve_db_path(input_path: str, roots: List[str]) -> Optional[str]:
    """
    Accepts either db-path (root-xxxx/rel) or filesystem path.
    Returns normalized db-path if allowed, else None.
    """
    if not input_path:
        return None
    if "/" in input_path and input_path.startswith("root-"):
        root_id = input_path.split("/", 1)[0]
        if root_id in resolve_root_ids(roots):
            return input_path
        return None
    if not WorkspaceManager:
        return None
    if input_path.startswith("root-") and "/" not in input_path:
        return None
    follow_symlinks = (os.environ.get("DECKARD_FOLLOW_SYMLINKS", "0").strip().lower() in ("1", "true", "yes", "on"))
    try:
        p = Path(os.path.expanduser(input_path))
        if not p.is_absolute():
            p = (Path.cwd() / p).resolve()
        else:
            p = p.resolve()
    except Exception:
        return None

    for root in roots:
        try:
            root_norm = WorkspaceManager._normalize_path(root, follow_symlinks=follow_symlinks)  # type: ignore
            root_path = Path(root_norm)
            if p == root_path or root_path in p.parents:
                rel = p.relative_to(root_path).as_posix()
                return f"{WorkspaceManager.root_id(str(root_path))}/{rel}"
        except Exception:
            continue
    return None
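
Note: to make the PACK1 wire format concrete, here is a sketch composing the builders above. The tool name, record kind, and field values are illustrative, not taken from the package:

```python
from mcp.tools._util import (
    pack_header, pack_line, pack_encode_id, pack_encode_text, pack_truncated,
)

# Illustrative payload for a hypothetical "search" response with two hits.
lines = [
    pack_header("search", {"q": pack_encode_text("def main")},
                returned=2, total=40, total_mode="exact"),
    pack_line("r", {"path": pack_encode_id("root-abc/app/main.py"),
                    "line": "12",
                    "snippet": pack_encode_text("def main():")}),
    pack_line("r", {"path": pack_encode_id("root-abc/mcp/cli.py"),
                    "line": "88",
                    "snippet": pack_encode_text("def main(argv):")}),
    pack_truncated(next_offset=2, limit=2, truncated_state="true"),
]
print("\n".join(lines))
# PACK1 tool=search ok=true q=def%20main returned=2 total_mode=exact total=40
# r:path=root-abc/app/main.py line=12 snippet=def%20main%28%29%3A
# r:path=root-abc/mcp/cli.py line=88 snippet=def%20main%28argv%29%3A
# m:truncated=true next=use_offset offset=2 limit=2
```

Text fields are percent-encoded with no safe characters, while identifier fields keep `/._-:@` literal, so a line can always be split on spaces and `=` without ambiguity.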
mcp/tools/deckard_guide.py
ADDED
@@ -0,0 +1,32 @@

#!/usr/bin/env python3
"""
Sari guidance tool for LLMs.
Returns a short usage guide to encourage search-first behavior.
"""
from typing import Any, Dict
from mcp.tools._util import mcp_response, pack_header, pack_line, pack_encode_text


def execute_deckard_guide(args: Dict[str, Any]) -> Dict[str, Any]:
    text = (
        "💡 Sari - Agentic Search Workflow Guide\n\n"
        "This tool acts as your 'external memory' for large codebases. "
        "Before burning tokens opening files one by one, follow the steps below to finish the task faster and more accurately.\n\n"
        "1. [Recon] Use `search` or `repo_candidates` to get the lay of the land.\n"
        "2. [Identify] Pinpoint the exact code location with `search_symbols` or `search_api_endpoints`.\n"
        "3. [Acquire] Read just the implementation you need via `read_symbol`. (Saves 80%+ of the context versus reading the whole file.)\n"
        "4. [Analyze] Trace the connections between pieces of code with `get_callers` or `get_implementations`.\n\n"
        "Core principle: 'Search' first, 'Select' only what you need, then 'Act'. "
        "This is how you keep your reasoning performance at its best.\n"
        "Note: the default mode is warn. If needed, adjust the search-first mode to warn/enforce/off."
    )
    def build_pack() -> str:
        lines = [pack_header("sari_guide", {}, returned=1)]
        lines.append(pack_line("t", single_value=pack_encode_text(text)))
        return "\n".join(lines)

    return mcp_response(
        "sari_guide",
        build_pack,
        lambda: {"content": [{"type": "text", "text": text}]},
    )
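
Note: a quick call sketch; the handler takes an args dict but does not read it:

```python
from mcp.tools.deckard_guide import execute_deckard_guide

res = execute_deckard_guide({})
print(res["content"][0]["text"])  # PACK1 envelope wrapping the guide text
```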
mcp/tools/doctor.py
ADDED
@@ -0,0 +1,208 @@

#!/usr/bin/env python3
"""
Doctor tool for Local Search MCP Server.
Returns structured diagnostics (no ANSI/prints).
"""
import json
import os
import socket
import shutil
import sys
from pathlib import Path
from typing import Any, Dict, List

try:
    from app.db import LocalSearchDB
    from app.config import Config
    from app.workspace import WorkspaceManager
    from app.registry import ServerRegistry
    from mcp.cli import get_daemon_address, is_daemon_running, read_pid
except ImportError:
    import sys
    from pathlib import Path
    sys.path.insert(0, str(Path(__file__).parent.parent.parent))
    from app.db import LocalSearchDB
    from app.config import Config
    from app.workspace import WorkspaceManager
    from app.registry import ServerRegistry
    from mcp.cli import get_daemon_address, is_daemon_running, read_pid


def _result(name: str, passed: bool, error: str = "") -> dict[str, Any]:
    return {"name": name, "passed": passed, "error": error}


def _check_db(ws_root: str) -> list[dict[str, Any]]:
    results: list[dict[str, Any]] = []
    cfg_path = WorkspaceManager.resolve_config_path(ws_root)
    cfg = Config.load(cfg_path, workspace_root_override=ws_root)
    db_path = Path(cfg.db_path)
    if not db_path.exists():
        results.append(_result("DB Existence", False, f"DB not found at {db_path}"))
        return results

    try:
        db = LocalSearchDB(str(db_path))
    except Exception as e:
        results.append(_result("DB Access", False, str(e)))
        return results

    results.append(_result("DB FTS5 Support", bool(db.fts_enabled), "FTS5 module missing in SQLite" if not db.fts_enabled else ""))
    try:
        cursor = db._read.execute("PRAGMA table_info(symbols)")
        cols = [r["name"] for r in cursor.fetchall()]
        if "end_line" in cols:
            results.append(_result("DB Schema v2.7.0", True))
        else:
            results.append(_result("DB Schema v2.7.0", False, "Column 'end_line' missing in 'symbols'. Run update."))
    except Exception as e:
        results.append(_result("DB Schema Check", False, str(e)))
    finally:
        try:
            db.close()
        except Exception:
            pass

    return results


def _check_port(port: int, label: str) -> dict[str, Any]:
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind(("127.0.0.1", port))
        return _result(f"{label} Port {port} Availability", True)
    except OSError as e:
        return _result(f"{label} Port {port} Availability", False, f"Address in use or missing permission: {e}")
    finally:
        try:
            s.close()
        except Exception:
            pass


def _check_network() -> dict[str, Any]:
    try:
        socket.create_connection(("8.8.8.8", 53), timeout=3)
        return _result("Network Check", True)
    except OSError as e:
        return _result("Network Check", False, f"Unreachable: {e}")


def _check_disk_space(ws_root: str, min_gb: float) -> dict[str, Any]:
    try:
        total, used, free = shutil.disk_usage(ws_root)
        free_gb = free / (1024**3)
        if free_gb < min_gb:
            return _result("Disk Space", False, f"Low space: {free_gb:.2f} GB (Min: {min_gb} GB)")
        return _result("Disk Space", True)
    except Exception as e:
        return _result("Disk Space", False, str(e))


def _check_daemon() -> dict[str, Any]:
    host, port = get_daemon_address()
    running = is_daemon_running(host, port)
    if running:
        pid = read_pid()
        return _result("Sari Daemon", True, f"Running on {host}:{port} (PID: {pid})")
    return _result("Sari Daemon", False, "Not running")


def _check_search_first_usage(usage: Dict[str, Any], mode: str) -> dict[str, Any]:
    violations = int(usage.get("read_without_search", 0))
    searches = int(usage.get("search", 0))
    symbol_searches = int(usage.get("search_symbols", 0))
    if violations == 0:
        return _result("Search-First Usage", True, "")
    policy = mode if mode in {"off", "warn", "enforce"} else "unknown"
    error = (
        f"Search-first policy {policy}: {violations} read call(s) without prior search "
        f"(search={searches}, search_symbols={symbol_searches})."
    )
    return _result("Search-First Usage", False, error)


def execute_doctor(args: Dict[str, Any]) -> Dict[str, Any]:
    ws_root = WorkspaceManager.resolve_workspace_root()

    include_network = bool(args.get("include_network", True))
    include_port = bool(args.get("include_port", True))
    include_db = bool(args.get("include_db", True))
    include_disk = bool(args.get("include_disk", True))
    include_daemon = bool(args.get("include_daemon", True))
    include_venv = bool(args.get("include_venv", True))
    include_marker = bool(args.get("include_marker", False))
    port = int(args.get("port", 0))
    min_disk_gb = float(args.get("min_disk_gb", 1.0))

    results: list[dict[str, Any]] = []

    if include_venv:
        in_venv = sys.prefix != sys.base_prefix
        results.append(_result("Virtualenv", True, "" if in_venv else "Not running in venv (ok)"))

    if include_marker:
        results.append(_result("Workspace Marker (.codex-root)", True, "Marker check deprecated"))

    if include_daemon:
        results.append(_check_daemon())

    if include_port:
        daemon_host, daemon_port = get_daemon_address()
        results.append(_check_port(daemon_port, "Daemon"))
        http_port = 0
        try:
            inst = ServerRegistry().get_instance(ws_root)
            if inst and inst.get("port"):
                http_port = int(inst.get("port"))
        except Exception:
            http_port = 0
        if not http_port:
            try:
                cfg_path = WorkspaceManager.resolve_config_path(ws_root)
                cfg = Config.load(cfg_path, workspace_root_override=ws_root)
                http_port = int(cfg.http_api_port)
            except Exception:
                http_port = 0
        if port:
            http_port = port
        if http_port:
            results.append(_check_port(http_port, "HTTP"))

    if include_network:
        results.append(_check_network())

    if include_db:
        results.extend(_check_db(ws_root))

    if include_disk:
        results.append(_check_disk_space(ws_root, min_disk_gb))

    usage = args.get("search_usage")
    if isinstance(usage, dict):
        mode = str(args.get("search_first_mode", "unknown"))
        results.append(_check_search_first_usage(usage, mode))

    output = {
        "workspace_root": ws_root,
        "results": results,
    }

    compact = str(os.environ.get("DECKARD_RESPONSE_COMPACT") or "1").strip().lower() not in {"0", "false", "no", "off"}
    payload = json.dumps(output, ensure_ascii=False, separators=(",", ":")) if compact else json.dumps(output, ensure_ascii=False, indent=2)
    try:
        from mcp.tools._util import mcp_response, pack_header, pack_line, pack_encode_text
    except Exception:
        from _util import mcp_response, pack_header, pack_line, pack_encode_text

    def build_pack() -> str:
        lines = [pack_header("doctor", {}, returned=1)]
        lines.append(pack_line("t", single_value=pack_encode_text(payload)))
        return "\n".join(lines)

    return mcp_response(
        "doctor",
        build_pack,
        lambda: {"content": [{"type": "text", "text": payload}]},
    )
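
Note: a hedged usage sketch of the doctor tool; the argument names come from the handler above, and the response shape follows `mcp_response`:

```python
from mcp.tools.doctor import execute_doctor

# Run only the local checks; skip the outbound network probe.
res = execute_doctor({"include_network": False, "min_disk_gb": 0.5})
text = res["content"][0]["text"]
# In the default mode this is a PACK1 envelope with the diagnostics JSON
# percent-encoded in a t: record; with DECKARD_FORMAT=json it is the JSON itself.
print(text)
```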