sari 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/__init__.py +1 -0
- app/config.py +240 -0
- app/db.py +932 -0
- app/dedup_queue.py +77 -0
- app/engine_registry.py +56 -0
- app/engine_runtime.py +472 -0
- app/http_server.py +204 -0
- app/indexer.py +1532 -0
- app/main.py +147 -0
- app/models.py +39 -0
- app/queue_pipeline.py +65 -0
- app/ranking.py +144 -0
- app/registry.py +172 -0
- app/search_engine.py +572 -0
- app/watcher.py +124 -0
- app/workspace.py +286 -0
- deckard/__init__.py +3 -0
- deckard/__main__.py +4 -0
- deckard/main.py +345 -0
- deckard/version.py +1 -0
- mcp/__init__.py +1 -0
- mcp/__main__.py +19 -0
- mcp/cli.py +485 -0
- mcp/daemon.py +149 -0
- mcp/proxy.py +304 -0
- mcp/registry.py +218 -0
- mcp/server.py +519 -0
- mcp/session.py +234 -0
- mcp/telemetry.py +112 -0
- mcp/test_cli.py +89 -0
- mcp/test_daemon.py +124 -0
- mcp/test_server.py +197 -0
- mcp/tools/__init__.py +14 -0
- mcp/tools/_util.py +244 -0
- mcp/tools/deckard_guide.py +32 -0
- mcp/tools/doctor.py +208 -0
- mcp/tools/get_callers.py +60 -0
- mcp/tools/get_implementations.py +60 -0
- mcp/tools/index_file.py +75 -0
- mcp/tools/list_files.py +138 -0
- mcp/tools/read_file.py +48 -0
- mcp/tools/read_symbol.py +99 -0
- mcp/tools/registry.py +212 -0
- mcp/tools/repo_candidates.py +89 -0
- mcp/tools/rescan.py +46 -0
- mcp/tools/scan_once.py +54 -0
- mcp/tools/search.py +208 -0
- mcp/tools/search_api_endpoints.py +72 -0
- mcp/tools/search_symbols.py +63 -0
- mcp/tools/status.py +135 -0
- sari/__init__.py +1 -0
- sari/__main__.py +4 -0
- sari-0.0.1.dist-info/METADATA +521 -0
- sari-0.0.1.dist-info/RECORD +58 -0
- sari-0.0.1.dist-info/WHEEL +5 -0
- sari-0.0.1.dist-info/entry_points.txt +2 -0
- sari-0.0.1.dist-info/licenses/LICENSE +21 -0
- sari-0.0.1.dist-info/top_level.txt +4 -0
app/workspace.py
ADDED
@@ -0,0 +1,286 @@
#!/usr/bin/env python3
"""
Workspace management for Local Search MCP Server.
Handles workspace detection and global path resolution.
"""
import os
import hashlib
from pathlib import Path
from typing import Optional


class WorkspaceManager:
    """Manages workspace detection and global paths."""

    @staticmethod
    def _normalize_path(path: str, follow_symlinks: bool) -> str:
        expanded = os.path.expanduser(path)
        if follow_symlinks:
            normalized = os.path.realpath(expanded)
        else:
            normalized = os.path.abspath(expanded)
        if os.name == "nt":
            normalized = normalized.lower()
        return normalized.rstrip(os.sep)

    @staticmethod
    def root_id(path: str) -> str:
        """Stable root id derived from normalized path."""
        follow_symlinks = (os.environ.get("DECKARD_FOLLOW_SYMLINKS", "0").strip().lower() in ("1", "true", "yes", "on"))
        norm = WorkspaceManager._normalize_path(path, follow_symlinks=follow_symlinks)
        digest = hashlib.sha1(norm.encode("utf-8")).hexdigest()[:8]
        return f"root-{digest}"

    @staticmethod
    def resolve_workspace_roots(
        root_uri: Optional[str] = None,
        roots_json: Optional[str] = None,
        roots_env: Optional[dict] = None,
        config_roots: Optional[list] = None
    ) -> list[str]:
        """
        Resolve multiple workspace roots with priority, normalization, and deduplication.

        Priority (Union & Merge):
        1. config.roots
        2. DECKARD_ROOTS_JSON
        3. DECKARD_ROOT_1..N
        4. DECKARD_WORKSPACE_ROOT (legacy)
        5. LOCAL_SEARCH_WORKSPACE_ROOT (legacy)
        6. root_uri (MCP initialize param, ephemeral)
        7. Fallback to cwd (only if no candidates)

        Returns:
            List of absolute, normalized paths.
        """
        candidates: list[tuple[str, str]] = []
        env_vars = roots_env if roots_env is not None else os.environ
        follow_symlinks = (env_vars.get("DECKARD_FOLLOW_SYMLINKS", "0").strip().lower() in ("1", "true", "yes", "on"))
        keep_nested = (env_vars.get("DECKARD_KEEP_NESTED_ROOTS", "0").strip().lower() in ("1", "true", "yes", "on"))

        # 1. config.roots
        if config_roots:
            for x in config_roots:
                if x:
                    candidates.append((str(x), "config"))

        # 2. DECKARD_ROOTS_JSON
        import json
        json_str = roots_json or env_vars.get("DECKARD_ROOTS_JSON", "")
        if json_str:
            try:
                loaded = json.loads(json_str)
                if isinstance(loaded, list):
                    for x in loaded:
                        if x:
                            candidates.append((str(x), "env"))
            except Exception:
                pass

        # 3. DECKARD_ROOT_1..N
        for k, v in env_vars.items():
            if k.startswith("DECKARD_ROOT_") and k[13:].isdigit():
                if v and v.strip():
                    candidates.append((v.strip(), "env"))

        # 4. Legacy DECKARD_WORKSPACE_ROOT (Higher priority than LOCAL_SEARCH)
        legacy_val = (env_vars.get("DECKARD_WORKSPACE_ROOT") or "").strip()
        if legacy_val:
            if legacy_val == "${cwd}":
                candidates.append((os.getcwd(), "env"))
            else:
                candidates.append((legacy_val, "env"))

        # 5. Legacy LOCAL_SEARCH_WORKSPACE_ROOT
        ls_val = (env_vars.get("LOCAL_SEARCH_WORKSPACE_ROOT") or "").strip()
        if ls_val:
            if ls_val == "${cwd}":
                candidates.append((os.getcwd(), "env"))
            else:
                candidates.append((ls_val, "env"))

        # 6. root_uri (ephemeral)
        if root_uri:
            uri_path = root_uri[7:] if root_uri.startswith("file://") else root_uri
            try:
                if uri_path:
                    candidate = os.path.expanduser(uri_path)
                    if os.path.exists(candidate):
                        candidates.append((candidate, "root_uri"))
            except Exception:
                pass

        # 7. Fallback to cwd
        if not candidates:
            candidates.append((os.getcwd(), "fallback"))

        # Normalization
        resolved_paths: list[tuple[str, str]] = []
        seen = set()
        for p, src in candidates:
            try:
                abs_path = WorkspaceManager._normalize_path(p, follow_symlinks=follow_symlinks)
                if abs_path not in seen:
                    resolved_paths.append((abs_path, src))
                    seen.add(abs_path)
            except Exception:
                continue

        # Inclusion check while preserving priority order (first seen wins)
        final_roots: list[str] = []
        final_meta: list[tuple[str, str]] = []
        if keep_nested:
            for p, src in resolved_paths:
                final_roots.append(p)
                final_meta.append((p, src))
        else:
            for p, src in resolved_paths:
                p_path = Path(p)
                is_covered = False
                for existing, ex_src in final_meta:
                    try:
                        existing_path = Path(existing)
                        # If root_uri is a child of config/env, drop root_uri
                        if src == "root_uri" and ex_src in {"config", "env"}:
                            if p_path == existing_path or existing_path in p_path.parents or p.startswith(existing + os.sep):
                                is_covered = True
                                break
                        # If root_uri is parent of config/env, keep both (skip collapse)
                        if ex_src == "root_uri" and src in {"config", "env"}:
                            if p_path == existing_path or p.startswith(existing + os.sep) or existing_path in p_path.parents:
                                is_covered = False
                                continue
                        # Default: collapse nested roots (parent keeps, child removed)
                        if p_path == existing_path or existing_path in p_path.parents or p.startswith(existing + os.sep):
                            is_covered = True
                            break
                    except Exception:
                        continue
                if not is_covered:
                    final_meta.append((p, src))
                    final_roots.append(p)

        return final_roots

    @staticmethod
    def resolve_workspace_root(root_uri: Optional[str] = None) -> str:
        """
        Unified resolver for workspace root directory.
        Legacy wrapper around resolve_workspace_roots.
        Returns the first resolved root.
        """
        roots = WorkspaceManager.resolve_workspace_roots(root_uri=root_uri)
        return roots[0] if roots else str(Path.cwd())

    @staticmethod
    def is_path_allowed(path: str, roots: list[str]) -> bool:
        """Check if path is within any of the roots."""
        try:
            follow_symlinks = (os.environ.get("DECKARD_FOLLOW_SYMLINKS", "0").strip().lower() in ("1", "true", "yes", "on"))
            p = Path(WorkspaceManager._normalize_path(path, follow_symlinks=follow_symlinks))
            for r in roots:
                root_path = Path(WorkspaceManager._normalize_path(r, follow_symlinks=follow_symlinks))
                if p == root_path or root_path in p.parents:
                    return True
            return False
        except Exception:
            return False

    @staticmethod
    def detect_workspace(root_uri: Optional[str] = None) -> str:
        """Legacy alias for resolve_workspace_root."""
        return WorkspaceManager.resolve_workspace_root(root_uri)

    @staticmethod
    def resolve_config_path(workspace_root: str) -> str:
        """
        Resolve config path with unified priority.

        Priority:
        1. DECKARD_CONFIG environment variable (SSOT)
        2. Default SSOT path (~/.config/sari/config.json or %APPDATA%/sari/config.json)
        """
        val = (os.environ.get("DECKARD_CONFIG") or "").strip()
        if val:
            p = Path(os.path.expanduser(val))
            if p.exists():
                return str(p.resolve())

        if os.name == "nt":
            ssot = Path(os.environ.get("APPDATA", os.path.expanduser("~\\AppData\\Roaming"))) / "sari" / "config.json"
        else:
            ssot = Path.home() / ".config" / "sari" / "config.json"

        if ssot.exists():
            return str(ssot.resolve())

        # Legacy migration (one-time copy + backup)
        legacy_candidates = [
            Path(workspace_root) / ".codex" / "tools" / "deckard" / "config" / "config.json",
            Path.home() / ".deckard" / "config.json",
        ]
        for legacy in legacy_candidates:
            if legacy.exists():
                try:
                    ssot.parent.mkdir(parents=True, exist_ok=True)
                    ssot.write_text(legacy.read_text(encoding="utf-8"), encoding="utf-8")
                    bak = legacy.with_suffix(legacy.suffix + ".bak")
                    try:
                        legacy.rename(bak)
                    except Exception:
                        marker = legacy.parent / ".migrated"
                        marker.write_text(f"migrated to {ssot}", encoding="utf-8")
                    print(f"[sari] migrated legacy config from {legacy} to {ssot}")
                except Exception:
                    pass
                break

        return str(ssot.resolve())

    @staticmethod
    def get_global_data_dir() -> Path:
        """Get global data directory: ~/.local/share/sari/ (or AppData/Local on Win)"""
        if os.name == "nt":
            return Path(os.environ.get("LOCALAPPDATA", os.path.expanduser("~\\AppData\\Local"))) / "sari"
        return Path.home() / ".local" / "share" / "sari"

    @staticmethod
    def get_global_db_path() -> Path:
        """Get global DB path: ~/.local/share/sari/index.db (Opt-in only)"""
        return WorkspaceManager.get_global_data_dir() / "index.db"

    @staticmethod
    def get_local_db_path(workspace_root: str) -> Path:
        """Get workspace-local DB path: .codex/tools/sari/data/index.db"""
        return Path(workspace_root) / ".codex" / "tools" / "sari" / "data" / "index.db"

    @staticmethod
    def get_global_log_dir() -> Path:
        """Get global log directory, with env override."""
        for env_key in ["DECKARD_LOG_DIR", "LOCAL_SEARCH_LOG_DIR"]:
            val = (os.environ.get(env_key) or "").strip()
            if val:
                return Path(os.path.expanduser(val)).resolve()
        return WorkspaceManager.get_global_data_dir() / "logs"

    @staticmethod
    def roots_hash(root_ids: list[str]) -> str:
        joined = "|".join(sorted(root_ids))
        digest = hashlib.sha1(joined.encode("utf-8")).hexdigest()[:12]
        return digest

    @staticmethod
    def get_engine_base_dir() -> Path:
        return WorkspaceManager.get_global_data_dir() / "engine"

    @staticmethod
    def get_engine_venv_dir() -> Path:
        return WorkspaceManager.get_engine_base_dir() / ".venv"

    @staticmethod
    def get_engine_cache_dir() -> Path:
        return Path(os.path.expanduser("~/.cache")) / "sari" / "engine"

    @staticmethod
    def get_engine_index_dir(roots_hash: str) -> Path:
        return WorkspaceManager.get_global_data_dir() / "index" / roots_hash
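A minimal sketch (not part of the wheel) of how the root resolution above behaves: nested config roots are collapsed into their parent unless DECKARD_KEEP_NESTED_ROOTS is enabled, and root ids are short SHA-1 digests of the normalized path. The paths used here are hypothetical.

from app.workspace import WorkspaceManager

# Two config roots where the second is nested inside the first; with
# DECKARD_KEEP_NESTED_ROOTS unset, the child collapses into the parent.
roots = WorkspaceManager.resolve_workspace_roots(
    root_uri=None,
    roots_env={},  # isolate the example from the real environment variables
    config_roots=["~/projects/demo", "~/projects/demo/packages/api"],
)
print(roots)                               # e.g. ["/home/<user>/projects/demo"] on POSIX
print(WorkspaceManager.root_id(roots[0]))  # "root-" + first 8 hex chars of the SHA-1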
deckard/__init__.py
ADDED
deckard/__main__.py
ADDED
deckard/main.py
ADDED
@@ -0,0 +1,345 @@
import argparse
import json
import os
import sys
from pathlib import Path
from typing import List

from app.workspace import WorkspaceManager
from app.config import Config
from app.db import LocalSearchDB
from app.engine_registry import get_default_engine
from mcp.tools._util import pack_error, ErrorCode


def _print_transport_error(fmt: str) -> int:
    msg = "MCP-over-HTTP transport is not supported."
    if fmt == "json":
        payload = {"error": {"code": ErrorCode.ERR_MCP_HTTP_UNSUPPORTED.value, "message": msg}}
        print(json.dumps(payload, ensure_ascii=False))
    else:
        print(pack_error("server", ErrorCode.ERR_MCP_HTTP_UNSUPPORTED, msg))
    return 1


def _write_toml_block(cfg_path: Path, command: str, args: List[str], env: dict) -> None:
    cfg_path.parent.mkdir(parents=True, exist_ok=True)
    lines = cfg_path.read_text(encoding="utf-8").splitlines() if cfg_path.exists() else []
    new_lines = []
    in_sari = False
    for line in lines:
        if line.strip() == "[mcp_servers.sari]":
            in_sari = True
            continue
        if in_sari and line.startswith("[") and line.strip() != "[mcp_servers.sari]":
            in_sari = False
            new_lines.append(line)
            continue
        if not in_sari:
            new_lines.append(line)
    env_kv = ", ".join([f'{k} = "{v}"' for k, v in env.items()])
    block = [
        "[mcp_servers.sari]",
        f'command = "{command}"',
        f"args = {json.dumps(args)}",
        f"env = {{ {env_kv} }}",
        "startup_timeout_sec = 60",
    ]
    new_lines = block + new_lines
    cfg_path.write_text("\n".join(new_lines) + "\n", encoding="utf-8")


def _write_json_settings(cfg_path: Path, command: str, args: List[str], env: dict) -> None:
    cfg_path.parent.mkdir(parents=True, exist_ok=True)
    data = {}
    if cfg_path.exists():
        try:
            data = json.loads(cfg_path.read_text(encoding="utf-8"))
        except Exception:
            data = {}
    mcp_servers = data.get("mcpServers") or {}
    mcp_servers["sari"] = {"command": command, "args": args, "env": env}
    data["mcpServers"] = mcp_servers
    cfg_path.write_text(json.dumps(data, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")


def _cmd_install(host: str, do_print: bool) -> int:
    ssot = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    env = {
        "DECKARD_CONFIG": ssot,
    }
    args = ["--transport", "stdio", "--format", "pack"]
    command = "sari"

    if do_print:
        payload = {
            "command": command,
            "args": args,
            "env": env,
        }
        print(json.dumps(payload, ensure_ascii=False, indent=2))
        return 0

    if host in {"codex", "gemini"}:
        cfg_path = Path.cwd() / f".{host}" / "config.toml"
        _write_toml_block(cfg_path, command, args, env)
        print(f"[sari] Updated {cfg_path}")
        return 0
    if host in {"claude"}:
        if os.name == "nt":
            cfg_path = Path(os.environ.get("APPDATA", os.path.expanduser("~\\AppData\\Roaming"))) / "Claude" / "claude_desktop_config.json"
        else:
            cfg_path = Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json"
        _write_json_settings(cfg_path, command, args, env)
        print(f"[sari] Updated {cfg_path}")
        return 0
    if host in {"cursor"}:
        cfg_path = Path.home() / ".cursor" / "mcp.json"
        _write_json_settings(cfg_path, command, args, env)
        print(f"[sari] Updated {cfg_path}")
        return 0

    print(f"[sari] Unsupported host: {host}", file=sys.stderr)
    return 2


def _cmd_config_show() -> int:
    cfg_path = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    if not Path(cfg_path).exists():
        print("{}")
        return 0
    print(Path(cfg_path).read_text(encoding="utf-8"))
    return 0


def _cmd_roots_list() -> int:
    cfg_path = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    if not Path(cfg_path).exists():
        print("[]")
        return 0
    data = json.loads(Path(cfg_path).read_text(encoding="utf-8"))
    roots = data.get("roots") or data.get("workspace_roots") or []
    print(json.dumps(roots, ensure_ascii=False, indent=2))
    return 0


def _cmd_roots_add(path: str) -> int:
    cfg_path = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    data = {}
    if Path(cfg_path).exists():
        try:
            data = json.loads(Path(cfg_path).read_text(encoding="utf-8"))
        except Exception:
            data = {}
    roots = data.get("roots") or data.get("workspace_roots") or []
    roots = [r for r in roots if r]
    roots.append(path)
    final = WorkspaceManager.resolve_workspace_roots(root_uri=None, roots_env={}, config_roots=roots)
    data["roots"] = final
    Path(cfg_path).parent.mkdir(parents=True, exist_ok=True)
    Path(cfg_path).write_text(json.dumps(data, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    print(json.dumps(final, ensure_ascii=False, indent=2))
    return 0


def _cmd_roots_remove(path: str) -> int:
    cfg_path = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    if not Path(cfg_path).exists():
        print("[]")
        return 0
    data = json.loads(Path(cfg_path).read_text(encoding="utf-8"))
    roots = data.get("roots") or data.get("workspace_roots") or []
    roots = [r for r in roots if r and r != path]
    data["roots"] = roots
    Path(cfg_path).write_text(json.dumps(data, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    print(json.dumps(roots, ensure_ascii=False, indent=2))
    return 0


def _cmd_index() -> int:
    try:
        from mcp.cli import _request_http
        _request_http("/rescan", {})
        print(json.dumps({"requested": True}))
        return 0
    except Exception as e:
        print(json.dumps({"requested": False, "error": str(e)}))
        return 1


def _cmd_status() -> int:
    try:
        from mcp.cli import _request_http
        data = _request_http("/status", {})
        print(json.dumps(data, ensure_ascii=False, indent=2))
        return 0
    except Exception as e:
        print(json.dumps({"error": str(e)}))
        return 1


def _load_engine_context():
    workspace_root = WorkspaceManager.resolve_workspace_root()
    cfg_path = WorkspaceManager.resolve_config_path(str(Path.cwd()))
    cfg = Config.load(cfg_path, workspace_root_override=workspace_root)
    db = LocalSearchDB(cfg.db_path)
    db.set_engine(get_default_engine(db, cfg, cfg.workspace_roots))
    return cfg, db


def _cmd_engine_status() -> int:
    try:
        _cfg, db = _load_engine_context()
        if hasattr(db.engine, "status"):
            st = db.engine.status()
            print(json.dumps(st.__dict__, ensure_ascii=False, indent=2))
            return 0
        print(json.dumps({"error": "engine status unsupported"}, ensure_ascii=False, indent=2))
        return 1
    except Exception as e:
        print(json.dumps({"error": str(e)}))
        return 1


def _cmd_engine_install() -> int:
    try:
        _cfg, db = _load_engine_context()
        if hasattr(db.engine, "install"):
            db.engine.install()
            print(json.dumps({"ok": True}))
            return 0
        print(json.dumps({"error": "engine install unsupported"}))
        return 1
    except Exception as e:
        print(json.dumps({"error": str(e)}))
        return 1


def _cmd_engine_rebuild() -> int:
    try:
        _cfg, db = _load_engine_context()
        if hasattr(db.engine, "rebuild"):
            db.engine.rebuild()
            print(json.dumps({"ok": True}))
            return 0
        print(json.dumps({"error": "engine rebuild unsupported"}))
        return 1
    except Exception as e:
        print(json.dumps({"error": str(e)}))
        return 1


def _cmd_engine_verify() -> int:
    try:
        _cfg, db = _load_engine_context()
        if hasattr(db.engine, "status"):
            st = db.engine.status()
            if st.engine_ready:
                print(json.dumps({"ok": True}))
                return 0
            print(json.dumps({"ok": False, "reason": st.reason, "hint": st.hint}))
            return 2
        print(json.dumps({"error": "engine status unsupported"}))
        return 1
    except Exception as e:
        print(json.dumps({"error": str(e)}))
        return 1


def _cmd_doctor() -> int:
    try:
        from doctor import run_doctor
        run_doctor()
        return 0
    except Exception as e:
        print(str(e), file=sys.stderr)
        return 1


def run_cmd(argv: List[str]) -> int:
    if not argv:
        print("missing subcommand", file=sys.stderr)
        return 2
    if argv[0] == "doctor":
        return _cmd_doctor()
    if argv[0] == "status":
        return _cmd_status()
    if argv[0] == "config" and len(argv) > 1 and argv[1] == "show":
        return _cmd_config_show()
    if argv[0] == "roots":
        if len(argv) < 2:
            print("roots add|remove|list", file=sys.stderr)
            return 2
        if argv[1] == "list":
            return _cmd_roots_list()
        if argv[1] == "add" and len(argv) > 2:
            return _cmd_roots_add(argv[2])
        if argv[1] == "remove" and len(argv) > 2:
            return _cmd_roots_remove(argv[2])
    if argv[0] == "index":
        return _cmd_index()
    if argv[0] == "install":
        parser = argparse.ArgumentParser(prog="sari --cmd install")
        parser.add_argument("--host", required=True, choices=["codex", "gemini", "claude", "cursor"])
        parser.add_argument("--print", action="store_true")
        ns = parser.parse_args(argv[1:])
        return _cmd_install(ns.host, ns.print)
    if argv[0] == "engine":
        if len(argv) < 2:
            print("engine status|install|rebuild|verify", file=sys.stderr)
            return 2
        action = argv[1]
        if action == "status":
            return _cmd_engine_status()
        if action == "install":
            return _cmd_engine_install()
        if action == "rebuild":
            return _cmd_engine_rebuild()
        if action == "verify":
            return _cmd_engine_verify()
    print(f"Unknown subcommand: {argv[0]}", file=sys.stderr)
    return 2


def main(argv: List[str] = None) -> int:
    argv = list(argv or sys.argv[1:])
    if argv and argv[0] in {"daemon", "proxy", "status", "search", "init"}:
        from mcp.cli import main as legacy_main
        sys.argv = ["sari"] + argv
        return legacy_main()
    if "--cmd" in argv:
        idx = argv.index("--cmd")
        cmd_args = argv[idx + 1 :]
        return run_cmd(cmd_args)

    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument("--transport", default="stdio", choices=["stdio", "http"])
    parser.add_argument("--format", default="pack", choices=["pack", "json"])
    parser.add_argument("--http-api", action="store_true")
    parser.add_argument("--http-api-port")
    parser.add_argument("--version", action="store_true")
    parser.add_argument("--help", action="store_true")
    ns, _ = parser.parse_known_args(argv)

    if ns.help:
        print("sari [--transport stdio|http] [--format pack|json] [--http-api] [--cmd <subcommand>]")
        return 0
    if ns.version:
        from mcp.server import LocalSearchMCPServer
        print(LocalSearchMCPServer.SERVER_VERSION)
        return 0

    os.environ["DECKARD_FORMAT"] = ns.format

    if ns.http_api:
        if ns.http_api_port:
            os.environ["DECKARD_HTTP_API_PORT"] = str(ns.http_api_port)
        from app.main import main as http_main
        return http_main()

    if ns.transport == "http":
        return _print_transport_error(ns.format)

    from mcp.server import main as mcp_main
    mcp_main()
    return 0
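A quick illustration (not shipped in the wheel) of the install flow above: with --print, _cmd_install only prints the host configuration it would write, so it is a safe way to inspect the command, args, and the DECKARD_CONFIG env entry. This assumes the package's app and mcp modules are importable from the current environment.

from deckard.main import main

# Prints a JSON payload roughly like:
#   {"command": "sari",
#    "args": ["--transport", "stdio", "--format", "pack"],
#    "env": {"DECKARD_CONFIG": "<resolved config.json path>"}}
exit_code = main(["--cmd", "install", "--host", "codex", "--print"])
assert exit_code == 0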
deckard/version.py
ADDED
@@ -0,0 +1 @@
__version__ = "0.0.1"
mcp/__init__.py
ADDED
@@ -0,0 +1 @@
# MCP Server for Local Search
mcp/__main__.py
ADDED
@@ -0,0 +1,19 @@
#!/usr/bin/env python3
"""
Entry point for `python -m mcp` execution.

Routes to either:
- CLI mode (sari daemon/proxy commands)
- Legacy server mode (for backward compatibility)
"""
import sys

if __name__ == "__main__":
    # Check if running as CLI
    if len(sys.argv) > 1 and sys.argv[1] in ("daemon", "proxy"):
        from .cli import main
        sys.exit(main())
    else:
        # Legacy: Run as stdio MCP server
        from .server import main
        main()
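For reference, a small dispatch sketch (illustrative only, not part of the package) that mirrors the routing performed by the __main__ module above:

def resolve_entry(argv: list[str]) -> str:
    """Return the entry point `python -m mcp` would dispatch to for argv."""
    if len(argv) > 1 and argv[1] in ("daemon", "proxy"):
        return "mcp.cli:main"      # CLI daemon/proxy mode
    return "mcp.server:main"       # legacy stdio MCP server

print(resolve_entry(["mcp", "daemon"]))  # -> mcp.cli:main
print(resolve_entry(["mcp"]))            # -> mcp.server:main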