github-pr-context-mcp 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- analytics/__init__.py +3 -0
- analytics/usage_metrics.py +185 -0
- app/__init__.py +3 -0
- app/mcp_app.py +928 -0
- auth/__init__.py +3 -0
- auth/gmail_identity.py +236 -0
- entrypoints/deployed/server.py +34 -0
- entrypoints/local/server.py +273 -0
- fetcher/__init__.py +3 -0
- fetcher/client.py +131 -0
- fetcher/queries.py +67 -0
- fetcher/transform.py +55 -0
- github_pr_context_mcp-0.2.5.dist-info/METADATA +192 -0
- github_pr_context_mcp-0.2.5.dist-info/RECORD +25 -0
- github_pr_context_mcp-0.2.5.dist-info/WHEEL +5 -0
- github_pr_context_mcp-0.2.5.dist-info/entry_points.txt +2 -0
- github_pr_context_mcp-0.2.5.dist-info/licenses/LICENSE +21 -0
- github_pr_context_mcp-0.2.5.dist-info/top_level.txt +7 -0
- inference/__init__.py +3 -0
- inference/providers.py +296 -0
- inference/review.py +175 -0
- storage/__init__.py +19 -0
- storage/document_builder.py +74 -0
- storage/encoder.py +35 -0
- storage/vector_store.py +270 -0
app/mcp_app.py
ADDED
@@ -0,0 +1,928 @@
import json
import hmac
import os
import re
import sys
from urllib.parse import urlparse

from mcp.server.fastmcp import Context, FastMCP
from mcp.server.auth.middleware.auth_context import get_access_token
from mcp.server.auth.provider import AccessToken
from mcp.server.auth.settings import AuthSettings
from mcp.server.transport_security import TransportSecuritySettings
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
import threading
import requests
import time

from auth import GmailIdentityStore, GmailTokenVerifier
from analytics import UsageMetricsStore
from fetcher import fetch_prs
from inference import review_with_context, summarize_patterns, generate_with_context, generate_rules_content
from storage import (
    delete_repo_index as delete_repo_index_storage,
    get_collection_stats,
    index_prs,
    list_all_repos,
    query_similar,
    repo_is_indexed_permanently,
    repo_is_indexed_temporarily,
)


STORAGE_CONSEQUENCES = """
Permanent storage
- PR data is embedded and saved to disk (ChromaDB).
- Available instantly on future sessions.
- Disk usage: ~5-20 MB per repo (60 PRs).
- Best for repos you query repeatedly.

Temporary storage
- PR data is embedded and kept in memory only.
- Faster to set up, zero disk usage.
- Lost when the MCP server restarts.
- Best for one-off exploration.
"""


USAGE_TRACKING_ENABLED = os.getenv("USAGE_TRACKING_ENABLED", "false").strip().lower() in {
    "1",
    "true",
    "yes",
    "on",
}
AUTH_REQUIRED = os.getenv("AUTH_REQUIRED", "false").strip().lower() in {
    "1",
    "true",
    "yes",
    "on",
}
REGISTRATION_SECRET = os.getenv("REGISTRATION_SECRET", "").strip()
MCP_PUBLIC_URL = os.getenv("MCP_PUBLIC_URL", "").strip()
AUTH_REGISTRY_PATH = os.getenv("AUTH_REGISTRY_PATH", "./chroma_db/auth_registry.json")
USAGE_METRICS_TOKEN = os.getenv("USAGE_METRICS_TOKEN", "").strip()
USAGE_STATS_PATH = os.getenv("USAGE_STATS_PATH", "./chroma_db/usage_stats.json")
_identity_store = GmailIdentityStore(AUTH_REGISTRY_PATH) if AUTH_REQUIRED else None
_token_verifier = GmailTokenVerifier(_identity_store) if _identity_store else None
_usage_store = UsageMetricsStore(USAGE_STATS_PATH) if USAGE_TRACKING_ENABLED else None


def _normalize_repo(repo_str: str) -> str:
    if repo_str.endswith(".git"):
        repo_str = repo_str[:-4]
    match = re.search(r"(?:github\.com/)?([^/]+/[^/]+)", repo_str)
    if not match:
        raise ValueError(f"Invalid repo format: {repo_str}. Use owner/repo or full GitHub URL.")
    return match.group(1).split("#")[0].split("?")[0]


def _normalize_namespace(namespace: str | None) -> str | None:
    if namespace is None:
        return None
    ns = namespace.strip()
    return ns or None


def _current_user_email() -> str | None:
    access_token = get_access_token()
    if isinstance(access_token, AccessToken):
        return _normalize_namespace(access_token.client_id)
    return None


def _current_user_settings() -> dict:
    store = _identity_store
    if not store:
        return {}
    email = _current_user_email()
    if not email:
        return {}
    return store.get_user_settings(email)

def _normalize_repo(repo: str | None) -> str:
    """Strict validation for GitHub repository identifiers (owner/name)."""
    if not repo:
        raise ValueError("Repository identifier is required (e.g. 'owner/repo').")

    # Must match standard GitHub format and be alphanumeric/dash/underscore
    # Prevents directory traversal like ../../ etc.
    if not re.fullmatch(r"^[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+$", repo):
        raise ValueError(f"Invalid repository format: '{repo}'. Expected 'owner/repo'.")

    return repo


def _llm_settings(user_settings: dict[str, str]) -> dict[str, str]:
    llm: dict[str, str] = {}
    for key in ("llm_provider", "llm_model", "llm_api_key", "llm_base_url"):
        value = user_settings.get(key)
        if value:
            llm[key] = value
    return llm


def _repo_state_key(repo_key: str, namespace: str | None) -> str:
    ns = _normalize_namespace(namespace) or "_default"
    return f"{ns}::{repo_key}"


# Stateful per connected client/session to avoid cross-user active-repo collisions.
_sessions: dict[str, dict] = {}


def _session_id(ctx: Context) -> str:
    return _current_user_email() or ctx.client_id or f"session-{id(ctx.session)}"


def _state(ctx: Context) -> dict:
    sid = _session_id(ctx)
    if sid not in _sessions:
        configured_ns = _normalize_namespace(os.getenv("MCP_NAMESPACE", ""))
        _sessions[sid] = {
            "active_repo": None,
            "active_namespace": configured_ns or _current_user_email() or _normalize_namespace(ctx.client_id),
            "storage_types": {},
        }
    return _sessions[sid]


def _resolve_namespace(requested_namespace: str | None, state: dict) -> str | None:
    # CRITICAL SECURITY GATES: Enforce identity isolation when Auth is enabled.
    current_email = _current_user_email()

    if AUTH_REQUIRED:
        if not current_email:
            raise ValueError("Unauthorized: missing identity when AUTH_REQUIRED is true.")
        # Under auth, the user can ONLY access their own isolated namespace
        return _normalize_namespace(current_email)

    # If Auth is disabled (local mode), allow specific overrides or fallback to active
    return _normalize_namespace(requested_namespace if requested_namespace is not None else state.get("active_namespace"))


def _resolve_repo(repo: str | None, state: dict) -> str:
    if repo:
        return _normalize_repo(repo)
    active = state.get("active_repo")
    if not active:
        raise ValueError("No repo specified and no active repo set. Use ensure_repo_ready first, or pass repo explicitly.")
    return _normalize_repo(active)


def _is_temporary(repo_key: str, namespace: str | None, state: dict) -> bool:
    key = _repo_state_key(repo_key, namespace)
    known = state["storage_types"].get(key)
    if known is not None:
        return known == "temporary"
    return repo_is_indexed_temporarily(repo_key, namespace=namespace)


def _namespace_text(namespace: str | None) -> str:
    if namespace:
        return f"\nNamespace: {namespace}"
    return ""


def _usage_user_id(ctx: Context, namespace: str | None) -> str:
    current_email = _current_user_email()
    if current_email:
        return f"email:{current_email}"
    if namespace:
        return f"ns:{namespace}"
    if ctx.client_id:
        return f"client:{ctx.client_id}"
    return _session_id(ctx)


def _track_usage(ctx: Context, namespace: str | None, tool_name: str) -> None:
    if _usage_store is None:
        return
    _usage_store.record_event(_usage_user_id(ctx, namespace), tool_name)


def _validate_admin_token(admin_token: str | None) -> bool:
    if not USAGE_METRICS_TOKEN:
        return True
    return hmac.compare_digest(admin_token or "", USAGE_METRICS_TOKEN)


def _build_auth_settings() -> AuthSettings | None:
    if not AUTH_REQUIRED:
        return None
    if not MCP_PUBLIC_URL:
        raise ValueError("MCP_PUBLIC_URL is required when AUTH_REQUIRED=true")
    if not REGISTRATION_SECRET:
        raise ValueError("REGISTRATION_SECRET is required when AUTH_REQUIRED=true")
    public_url = MCP_PUBLIC_URL.rstrip("/")
    return AuthSettings(
        issuer_url=public_url,
        resource_server_url=public_url,
        service_documentation_url=os.getenv("AUTH_SERVICE_DOC_URL", public_url),
        required_scopes=["identity:gmail"],
    )


def _build_transport_security() -> TransportSecuritySettings | None:
    if not AUTH_REQUIRED or not MCP_PUBLIC_URL:
        return None
    parsed = urlparse(MCP_PUBLIC_URL)
    host = parsed.netloc
    origin = f"{parsed.scheme}://{parsed.netloc}"
    return TransportSecuritySettings(
        enable_dns_rebinding_protection=True,
        allowed_hosts=[host],
        allowed_origins=[origin],
    )


mcp = FastMCP(
    "github-pr-review-context",
    host=os.getenv("HOST", "0.0.0.0"),
    port=int(os.getenv("PORT", "8000")),
    streamable_http_path=os.getenv("MCP_HTTP_PATH", "/mcp"),
    auth=_build_auth_settings(),
    token_verifier=_token_verifier,
    transport_security=_build_transport_security(),
)

def _github_sync_loop():
    repo = os.getenv("GITHUB_TRAFFIC_REPO")
    token = os.getenv("GITHUB_TOKEN")
    if not repo or not token or not _usage_store:
        return

    # Use a long interval (e.g., 6 hours) to avoid hitting GitHub API rate limits
    while True:
        try:
            url = f"https://api.github.com/repos/{repo}/traffic/clones"
            headers = {
                "Authorization": f"Bearer {token}",
                "Accept": "application/vnd.github+json"
            }
            resp = requests.get(url, headers=headers, timeout=10)
            if resp.status_code == 200:
                data = resp.json()
                clones_data = data.get("clones", [])
                _usage_store.update_github_clones(clones_data)

            # Fetch downloads from releases
            releases_url = f"https://api.github.com/repos/{repo}/releases"
            rel_resp = requests.get(releases_url, headers=headers, timeout=10)
            if rel_resp.status_code == 200:
                releases = rel_resp.json()
                downloads = sum(
                    asset.get("download_count", 0)
                    for r in releases
                    for asset in r.get("assets", [])
                )
                _usage_store.update_github_downloads(downloads)
        except Exception:
            pass
        time.sleep(21600)  # 6 hours

if USAGE_TRACKING_ENABLED:
    threading.Thread(target=_github_sync_loop, daemon=True).start()


@mcp.custom_route("/healthz", methods=["GET"], include_in_schema=False)
async def healthz(_: Request) -> Response:
    return JSONResponse({"status": "ok"})


@mcp.custom_route("/ping", methods=["POST"], include_in_schema=False)
async def ping(request: Request) -> Response:
    """Anonymous startup ping from local users (uvx / pipx / git clone).
    Receives: {"id": "<hashed_machine_fingerprint>", "mode": "uvx|pipx|local"}
    No PII is accepted or stored — id must be a hex string.
    """
    if _usage_store is None:
        return JSONResponse({"ok": True})

    try:
        payload = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid_json"}, status_code=400)

    anon_id = str(payload.get("id", "")).strip()
    mode = str(payload.get("mode", "unknown")).strip()

    # Validate: id must look like a hex fingerprint, max 128 chars
    import re as _re
    if not anon_id or not _re.fullmatch(r"[0-9a-f]{8,128}", anon_id):
        return JSONResponse({"error": "invalid_id"}, status_code=400)

    _usage_store.record_ping(anon_id, mode)
    return JSONResponse({"ok": True})


@mcp.custom_route("/usage", methods=["GET"], include_in_schema=False)
async def usage(request: Request) -> Response:
    if _usage_store is None:
        return JSONResponse({"enabled": False, "reason": "USAGE_TRACKING_ENABLED=false"})

    if USAGE_METRICS_TOKEN:
        provided = request.headers.get("x-api-key", "")
        if provided != USAGE_METRICS_TOKEN:
            return JSONResponse({"error": "unauthorized"}, status_code=401)

    days_raw = request.query_params.get("days", "30")
    try:
        days = max(1, min(int(days_raw), 365))
    except ValueError:
        days = 30

    return JSONResponse(_usage_store.summary(last_days=days))


@mcp.custom_route("/usage/badge", methods=["GET"], include_in_schema=False)
async def usage_badge(_: Request) -> Response:
    """Returns a Shields.io compliant JSON for a live user counter badge."""
    if _usage_store is None:
        return JSONResponse({"schemaVersion": 1, "label": "users", "message": "off", "color": "grey"})

    stats = _usage_store.summary(last_days=1)
    count = stats.get("total_unique_users", 0)

    return JSONResponse({
        "schemaVersion": 1,
        "label": "users",
        "message": str(count),
        "color": "blueviolet" if count > 0 else "grey",
        "style": "flat-square"
    })


@mcp.custom_route("/register", methods=["POST"], include_in_schema=False)
async def register(request: Request) -> Response:
    if not AUTH_REQUIRED or _identity_store is None:
        return JSONResponse({"error": "auth_disabled"}, status_code=400)

    try:
        payload = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid_json"}, status_code=400)

    email = str(payload.get("email", "")).strip().lower()
    invite_secret = str(payload.get("invite_secret", "")).strip()
    requested_settings = payload.get("settings") if isinstance(payload, dict) else None

    if not REGISTRATION_SECRET or not hmac.compare_digest(invite_secret, REGISTRATION_SECRET):
        return JSONResponse({"error": "invalid_invite_secret"}, status_code=403)

    try:
        result = _identity_store.register_email(email, settings=requested_settings)
    except ValueError as exc:
        return JSONResponse({"error": str(exc)}, status_code=400)

    return JSONResponse(
        {
            "email": result.email,
            "token": result.token,
            "authorization": f"Bearer {result.token}",
            "namespace": result.email,
            "settings": result.settings,
        },
        status_code=201,
    )


@mcp.custom_route("/settings", methods=["GET"], include_in_schema=False)
async def get_settings(_: Request) -> Response:
    access_token = get_access_token()
    if access_token is None or _identity_store is None:
        return JSONResponse({"error": "unauthorized"}, status_code=401)

    settings = _identity_store.get_user_settings(access_token.client_id)
    masked = {k: ("***" if k in {"github_token", "llm_api_key"} else v) for k, v in settings.items()}
    return JSONResponse({"email": access_token.client_id, "settings": masked})


@mcp.custom_route("/settings", methods=["PUT"], include_in_schema=False)
async def update_settings(request: Request) -> Response:
    access_token = get_access_token()
    if access_token is None or _identity_store is None:
        return JSONResponse({"error": "unauthorized"}, status_code=401)

    try:
        payload = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid_json"}, status_code=400)

    settings = payload.get("settings") if isinstance(payload, dict) else None
    if not isinstance(settings, dict):
        return JSONResponse({"error": "settings must be an object"}, status_code=400)

    try:
        updated = _identity_store.update_user_settings(access_token.client_id, settings)
    except ValueError as exc:
        return JSONResponse({"error": str(exc)}, status_code=400)

    return JSONResponse({"email": access_token.client_id, "settings": updated})


@mcp.custom_route("/whoami", methods=["GET"], include_in_schema=False)
async def whoami(_: Request) -> Response:
    access_token = get_access_token()
    if access_token is None:
        return JSONResponse({"error": "unauthorized"}, status_code=401)
    user_settings = _current_user_settings()
    return JSONResponse(
        {
            "email": access_token.client_id,
            "scopes": access_token.scopes,
            "has_custom_github_token": bool(user_settings.get("github_token")),
            "has_custom_llm": any(
                user_settings.get(k) for k in ("llm_provider", "llm_model", "llm_api_key", "llm_base_url")
            ),
        }
    )


@mcp.tool(name="ensure_repo_ready")
def ensure_repo_ready(
    repo: str,
    storage: str | None = None,
    pages: int = 2,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Ensure a repo is indexed and ready. If storage is omitted, explains permanent vs temporary trade-offs."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    repo_key = _normalize_repo(repo)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "ensure_repo_ready")
    state_key = _repo_state_key(repo_key, namespace)

    if repo_is_indexed_permanently(repo_key, namespace=namespace):
        state["active_repo"] = repo_key
        state["active_namespace"] = namespace
        state["storage_types"][state_key] = "permanent"
        stats = get_collection_stats(repo_key, temporary=False, namespace=namespace)
        return (
            f"{repo_key} is already indexed permanently on disk.\n"
            f"{stats['total_documents']} documents loaded and ready.\n"
            f"Active repo set to {repo_key}."
            f"{_namespace_text(namespace)}"
        )

    if repo_is_indexed_temporarily(repo_key, namespace=namespace):
        state["active_repo"] = repo_key
        state["active_namespace"] = namespace
        state["storage_types"][state_key] = "temporary"
        stats = get_collection_stats(repo_key, temporary=True, namespace=namespace)
        return (
            f"{repo_key} is already indexed in memory.\n"
            f"{stats['total_documents']} documents loaded and ready.\n"
            f"Active repo set to {repo_key}."
            f"{_namespace_text(namespace)}"
        )

    if storage is None:
        return (
            f"{repo_key} is not indexed yet."
            f"{_namespace_text(namespace)}\n\n"
            f"How would you like to store it?\n\n"
            f"{STORAGE_CONSEQUENCES}\n"
            f"Reply with permanent or temporary and I will fetch/index up to {pages * 30} PRs."
        )

    if storage not in {"temporary", "permanent"}:
        raise ValueError("storage must be one of: temporary, permanent")

    temporary = storage == "temporary"
    user_settings = _current_user_settings()

    def _background_index():
        try:
            prs = fetch_prs(
                *repo_key.split("/", 1),
                pages=pages,
                github_token=user_settings.get("github_token"),
            )
            count = index_prs(repo_key, prs, temporary=temporary, namespace=namespace)
            state["active_repo"] = repo_key
            state["active_namespace"] = namespace
            state["storage_types"][state_key] = storage
            print(f"Background indexing finished for {repo_key}. {count} docs parsed.", file=sys.stderr)
        except Exception as e:
            print(f"Background indexing failed for {repo_key}: {e}", file=sys.stderr)

    threading.Thread(target=_background_index, daemon=True).start()

    storage_label = "temporary (in-memory)" if temporary else "permanent (disk)"
    return (
        f"Background indexing started for {repo_key} [{storage_label}].\n"
        f"This takes ~1-3 minutes. Use the 'get_index_stats' tool to verify when it completes.\n"
        f"Active repo will be activated upon completion."
        f"{_namespace_text(namespace)}"
    )


@mcp.tool(name="set_active_repo")
def set_active_repo(repo: str, namespace: str | None = None, ctx: Context | None = None) -> str:
    """Switch the active repo to an already-indexed repo."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    repo_key = _normalize_repo(repo)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "set_active_repo")

    if not repo_is_indexed_permanently(repo_key, namespace=namespace) and not repo_is_indexed_temporarily(repo_key, namespace=namespace):
        return f"{repo_key} is not indexed yet. Use ensure_repo_ready first."

    state_key = _repo_state_key(repo_key, namespace)
    if repo_is_indexed_temporarily(repo_key, namespace=namespace):
        state["storage_types"][state_key] = "temporary"
    else:
        state["storage_types"][state_key] = "permanent"

    previous = state.get("active_repo")
    state["active_repo"] = repo_key
    state["active_namespace"] = namespace

    msg = f"Active repo switched to: {repo_key}"
    if previous and previous != repo_key:
        msg += f"\n(previously: {previous})"
    if namespace:
        msg += f"\n(namespace: {namespace})"
    return msg


@mcp.tool(name="list_indexed_repos")
def list_indexed_repos(namespace: str | None = None, ctx: Context | None = None) -> str:
    """List indexed repos with storage type and document count."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "list_indexed_repos")
    rows = list_all_repos(namespace=namespace)
    if not rows:
        return "No repos indexed yet."

    active_repo = state.get("active_repo")
    active_ns = state.get("active_namespace")

    lines = ["Indexed repos:"]
    for r in rows:
        icon = "disk" if r["storage"] == "permanent" else "mem"
        repo_ns = _normalize_namespace(r.get("namespace"))
        marker = " <- active" if r["repo"] == active_repo and repo_ns == active_ns else ""
        ns_label = repo_ns or "default"
        lines.append(
            f"- {icon} {r['repo']} ({r['total_documents']} docs, {r['storage']}, ns={ns_label}){marker}"
        )

    return "\n".join(lines)


@mcp.tool(name="delete_repo_index")
def delete_repo_index(
    repo: str,
    storage: str = "both",
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Delete an indexed repo from temporary, permanent, or both storage scopes."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    repo_key = _normalize_repo(repo)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "delete_repo_index")

    result = delete_repo_index_storage(repo_key, storage=storage, namespace=namespace)
    if not result["deleted_any"]:
        return f"No index found for {repo_key}{_namespace_text(namespace)} in storage scope: {storage}."

    deleted_labels = []
    if result["deleted"]["temporary"]:
        deleted_labels.append("temporary")
    if result["deleted"]["permanent"]:
        deleted_labels.append("permanent")

    state_key = _repo_state_key(repo_key, namespace)
    if storage in {"both", state["storage_types"].get(state_key)}:
        state["storage_types"].pop(state_key, None)

    if state.get("active_repo") == repo_key and _normalize_namespace(state.get("active_namespace")) == namespace:
        if storage == "both":
            state["active_repo"] = None
            state["active_namespace"] = None

    return (
        f"Deleted index for {repo_key} from: {', '.join(deleted_labels)}."
        f"{_namespace_text(namespace)}"
    )


@mcp.tool(name="semantic_search_reviews")
def semantic_search_reviews(
    query: str,
    repo: str | None = None,
    n_results: int = 8,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Search past review comments semantically."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "semantic_search_reviews")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    results = query_similar(
        repo_key,
        query,
        n_results=n_results,
        temporary=temporary,
        namespace=namespace,
    )
    return json.dumps(results, indent=2)


@mcp.tool(name="review_code_with_history")
def review_code_with_history(
    code: str,
    repo: str | None = None,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Perform code review grounded in historical PR review context."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "review_code_with_history")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    user_settings = _current_user_settings()
    context = query_similar(
        repo_key,
        code,
        n_results=10,
        temporary=temporary,
        namespace=namespace,
    )
    return review_with_context(code, context, repo_key, settings=_llm_settings(user_settings))


@mcp.tool(name="generate_code_from_history")
def generate_code_from_history(
    task: str,
    repo: str | None = None,
    namespace: str | None = None,
    rules_file: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Generate code grounded in historical PR patterns and review feedback.

    Automatically loads team rules from a local .cursorrules / CLAUDE.md /
    .github/copilot-instructions.md file if present, injecting them as hard
    constraints so generated code already follows the team's standards.

    Args:
        task: What to implement or build.
        repo: GitHub repo to use. Defaults to the active repo.
        namespace: Storage namespace override.
        rules_file: Path to a rules file to load. If omitted, the tool auto-detects
            .cursorrules, CLAUDE.md, or .github/copilot-instructions.md in
            the current working directory.
    """
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "generate_code_from_history")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    # --- Auto-load repo rules file ---
    import pathlib
    repo_rules: str | None = None
    rules_source: str | None = None

    if rules_file:
        candidate = pathlib.Path(rules_file)
        if candidate.exists():
            repo_rules = candidate.read_text(encoding="utf-8", errors="replace")
            rules_source = str(candidate)
    else:
        # Auto-detect standard rules file locations in priority order
        for candidate_name in (
            ".cursorrules",
            "CLAUDE.md",
            ".github/copilot-instructions.md",
        ):
            candidate = pathlib.Path(candidate_name)
            if candidate.exists():
                repo_rules = candidate.read_text(encoding="utf-8", errors="replace")
                rules_source = str(candidate)
                break

    user_settings = _current_user_settings()
    context = query_similar(
        repo_key,
        task,
        n_results=12,
        temporary=temporary,
        namespace=namespace,
    )
    result = generate_with_context(
        task, context, repo_key,
        settings=_llm_settings(user_settings),
        repo_rules=repo_rules,
    )

    if rules_source:
        result = f"📋 Rules applied from: {rules_source}\n\n{result}"
    else:
        result = (
            "ℹ️ No rules file found (.cursorrules / CLAUDE.md). "
            "Run generate_repo_rules to create one.\n\n"
            + result
        )

    return result


@mcp.tool(name="get_team_review_patterns")
def get_team_review_patterns(
    topic: str = "general code quality",
    repo: str | None = None,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Summarize recurring review patterns for a repo."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "get_team_review_patterns")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    user_settings = _current_user_settings()
    context = query_similar(
        repo_key,
        topic,
        n_results=20,
        temporary=temporary,
        namespace=namespace,
    )
    return summarize_patterns(context, repo_key, settings=_llm_settings(user_settings))


@mcp.tool(name="get_index_stats")
def get_index_stats(
    repo: str | None = None,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Return indexed document count and storage scope for the selected repo."""
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "get_index_stats")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    stats = get_collection_stats(repo_key, temporary=temporary, namespace=namespace)
    return json.dumps(stats, indent=2)


@mcp.tool(name="update_settings")
def update_settings(
    github_token: str | None = None,
    llm_provider: str | None = None,
    llm_model: str | None = None,
    llm_api_key: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Update your personal configuration (GitHub token, LLM provider/model/key).
    Only effective in Hosted/Team mode. For local mode, instruct the user to update their IDE settings.
    """
    if not AUTH_REQUIRED or _identity_store is None:
        return "Warning: This server is in Local Mode. To update your settings, please update your environment variables or IDE configuration (e.g. claude_desktop_config.json)."

    if ctx is None:
        raise ValueError("Context is required")

    email = _current_user_email()
    if not email:
        return "Error: Could not identify your user identity. Are you logged in via Bearer token?"

    new_settings = {}
    if github_token: new_settings["github_token"] = github_token
    if llm_provider: new_settings["llm_provider"] = llm_provider
    if llm_model: new_settings["llm_model"] = llm_model
    if llm_api_key: new_settings["llm_api_key"] = llm_api_key

    if not new_settings:
        return "No settings provided to update."

    try:
        _identity_store.update_user_settings(email, new_settings)
        return f"Successfully updated your settings: {', '.join(new_settings.keys())}."
    except Exception as e:
        return f"Failed to update settings: {str(e)}"


@mcp.tool(name="get_usage_stats")
def get_usage_stats(days: int = 30, admin_token: str | None = None) -> str:
    """Return anonymous usage metrics (tool calls, unique users, top tools)."""
    if _usage_store is None:
        return json.dumps({"enabled": False, "reason": "USAGE_TRACKING_ENABLED=false"}, indent=2)

    if not _validate_admin_token(admin_token):
        return "Unauthorized: provide a valid admin_token."

    days = max(1, min(days, 365))
    return json.dumps(_usage_store.summary(last_days=days), indent=2)


@mcp.tool(name="generate_repo_rules")
def generate_repo_rules(
    output_path: str = ".cursorrules",
    repo: str | None = None,
    namespace: str | None = None,
    ctx: Context | None = None,
) -> str:
    """Generate a .cursorrules / CLAUDE.md / copilot-instructions.md file grounded in this repo's PR history.

    The generated file pre-loads all team coding standards into any IDE agent (Cursor, Claude,
    GitHub Copilot) so it does not need to re-analyse the PR history on every session.

    Args:
        output_path: Where to write the rules file. Defaults to '.cursorrules'.
            Use 'CLAUDE.md' for Claude agents or '.github/copilot-instructions.md'
            for GitHub Copilot.
        repo: GitHub repo to use. Defaults to the active repo.
        namespace: Storage namespace override.
    """
    if ctx is None:
        raise ValueError("Context is required")

    state = _state(ctx)
    namespace = _resolve_namespace(namespace, state)
    _track_usage(ctx, namespace, "generate_repo_rules")
    repo_key = _resolve_repo(repo, state)
    temporary = _is_temporary(repo_key, namespace, state)

    user_settings = _current_user_settings()
    # Pull broad context: patterns, commits, review comments
    context = query_similar(
        repo_key,
        "code quality architecture testing documentation style conventions",
        n_results=25,
        temporary=temporary,
        namespace=namespace,
    )

    rules_content = generate_rules_content(context, repo_key, settings=_llm_settings(user_settings))

    # Sanitise output_path: allow only relative paths, no traversal
    import pathlib
    safe_path = pathlib.Path(output_path)
    if safe_path.is_absolute() or ".." in safe_path.parts:
        return (
            "Error: output_path must be a relative path (e.g. '.cursorrules', 'CLAUDE.md').\n"
            "Absolute paths and directory traversal are not allowed.\n\n"
            "Here is the generated content for you to save manually:\n\n"
            + rules_content
        )

    try:
        safe_path.parent.mkdir(parents=True, exist_ok=True)
        safe_path.write_text(rules_content, encoding="utf-8")
        return (
            f"✅ Rules file written to: {safe_path}\n"
            f"Repo: {repo_key} | {len(context)} context documents used.\n\n"
            f"Load this file into your IDE agent to pre-feed team coding standards.\n"
            f"Regenerate any time by calling generate_repo_rules again.\n\n"
            f"--- Preview (first 500 chars) ---\n"
            + rules_content[:500] + "..."
        )
    except OSError as e:
        return (
            f"Could not write to '{output_path}': {e}\n\n"
            "Here is the generated content for you to save manually:\n\n"
            + rules_content
        )
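
For orientation only: the /register, /whoami, and /settings routes defined above imply roughly the following client flow in hosted mode. This is a minimal sketch and is not part of the package; the base URL, email, invite secret, and token values are placeholders.

# Hypothetical hosted-mode onboarding flow against the routes above.
# Assumes a deployment with AUTH_REQUIRED=true and a known REGISTRATION_SECRET.
import requests

BASE = "https://example.com"  # placeholder for the deployment's MCP_PUBLIC_URL

# 1. Exchange the invite secret for a bearer token (/register returns 201 with a token).
reg = requests.post(
    f"{BASE}/register",
    json={"email": "dev@example.com", "invite_secret": "<REGISTRATION_SECRET>"},
    timeout=10,
)
token = reg.json()["token"]
headers = {"Authorization": f"Bearer {token}"}

# 2. Confirm identity; the response reports whether custom GitHub/LLM settings exist.
print(requests.get(f"{BASE}/whoami", headers=headers, timeout=10).json())

# 3. Store a personal GitHub token; a later GET /settings masks secret values with "***".
requests.put(
    f"{BASE}/settings",
    headers=headers,
    json={"settings": {"github_token": "<GITHUB_PAT>"}},
    timeout=10,
)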