beadhub 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beadhub/__init__.py +12 -0
- beadhub/api.py +260 -0
- beadhub/auth.py +101 -0
- beadhub/aweb_context.py +65 -0
- beadhub/aweb_introspection.py +70 -0
- beadhub/beads_sync.py +514 -0
- beadhub/cli.py +330 -0
- beadhub/config.py +65 -0
- beadhub/db.py +129 -0
- beadhub/defaults/invariants/01-tracking-bdh-only.md +11 -0
- beadhub/defaults/invariants/02-communication-mail-first.md +36 -0
- beadhub/defaults/invariants/03-communication-chat.md +60 -0
- beadhub/defaults/invariants/04-identity-no-impersonation.md +17 -0
- beadhub/defaults/invariants/05-collaborate.md +12 -0
- beadhub/defaults/roles/backend.md +55 -0
- beadhub/defaults/roles/coordinator.md +44 -0
- beadhub/defaults/roles/frontend.md +77 -0
- beadhub/defaults/roles/implementer.md +73 -0
- beadhub/defaults/roles/reviewer.md +56 -0
- beadhub/defaults/roles/startup-expert.md +93 -0
- beadhub/defaults.py +262 -0
- beadhub/events.py +704 -0
- beadhub/internal_auth.py +121 -0
- beadhub/jsonl.py +68 -0
- beadhub/logging.py +62 -0
- beadhub/migrations/beads/001_initial.sql +70 -0
- beadhub/migrations/beads/002_search_indexes.sql +20 -0
- beadhub/migrations/server/001_initial.sql +279 -0
- beadhub/names.py +33 -0
- beadhub/notifications.py +275 -0
- beadhub/pagination.py +125 -0
- beadhub/presence.py +495 -0
- beadhub/rate_limit.py +152 -0
- beadhub/redis_client.py +11 -0
- beadhub/roles.py +35 -0
- beadhub/routes/__init__.py +1 -0
- beadhub/routes/agents.py +303 -0
- beadhub/routes/bdh.py +655 -0
- beadhub/routes/beads.py +778 -0
- beadhub/routes/claims.py +141 -0
- beadhub/routes/escalations.py +471 -0
- beadhub/routes/init.py +348 -0
- beadhub/routes/mcp.py +338 -0
- beadhub/routes/policies.py +833 -0
- beadhub/routes/repos.py +538 -0
- beadhub/routes/status.py +568 -0
- beadhub/routes/subscriptions.py +362 -0
- beadhub/routes/workspaces.py +1642 -0
- beadhub/workspace_config.py +202 -0
- beadhub-0.1.0.dist-info/METADATA +254 -0
- beadhub-0.1.0.dist-info/RECORD +54 -0
- beadhub-0.1.0.dist-info/WHEEL +4 -0
- beadhub-0.1.0.dist-info/entry_points.txt +2 -0
- beadhub-0.1.0.dist-info/licenses/LICENSE +21 -0
beadhub/routes/init.py
ADDED
|
@@ -0,0 +1,348 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
|
|
6
|
+
from aweb.auth import validate_project_slug
|
|
7
|
+
from aweb.bootstrap import BootstrapIdentityResult, bootstrap_identity
|
|
8
|
+
from fastapi import APIRouter, Depends, HTTPException, Request
|
|
9
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
10
|
+
|
|
11
|
+
from beadhub.beads_sync import is_valid_alias, is_valid_human_name
|
|
12
|
+
from beadhub.db import DatabaseInfra, get_db_infra
|
|
13
|
+
from beadhub.names import CLASSIC_NAMES
|
|
14
|
+
from beadhub.rate_limit import enforce_init_rate_limit
|
|
15
|
+
from beadhub.redis_client import get_redis
|
|
16
|
+
from beadhub.roles import ROLE_MAX_LENGTH, is_valid_role, normalize_role, role_to_alias_prefix
|
|
17
|
+
from beadhub.routes.repos import canonicalize_git_url, extract_repo_name
|
|
18
|
+
|
|
19
|
+
router = APIRouter(prefix="/v1/init", tags=["init"])
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _now_iso() -> str:
    """Return the current UTC time as an ISO-8601 timestamp string."""
    now = datetime.now(timezone.utc)
    return now.isoformat()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class InitRequest(BaseModel):
    """Bootstrap an aweb identity and (optionally) a BeadHub workspace."""

    model_config = ConfigDict(extra="forbid")

    # aweb identity fields (protocol)
    project_slug: str = Field(default="", max_length=256)
    project_name: str = Field(default="", max_length=256)
    alias: str | None = Field(default=None, min_length=1, max_length=64)
    human_name: str = Field(default="", max_length=64)
    agent_type: str = Field(default="agent", max_length=32)

    # beadhub extension fields (optional)
    repo_origin: str | None = Field(default=None, max_length=2048)
    role: str = Field(default="agent", max_length=ROLE_MAX_LENGTH)
    hostname: str = Field(default="", max_length=255)
    workspace_path: str = Field(default="", max_length=4096)

    @field_validator("project_slug")
    @classmethod
    def _validate_project_slug(cls, v: str) -> str:
        # An empty slug is permitted here; it may be inferred later from the repo.
        cleaned = (v or "").strip()
        return validate_project_slug(cleaned) if cleaned else ""

    @field_validator("alias")
    @classmethod
    def _validate_alias(cls, v: str | None) -> str | None:
        # Missing and blank aliases both normalize to None (auto-generated later).
        cleaned = (v or "").strip()
        if not cleaned:
            return None
        if is_valid_alias(cleaned):
            return cleaned
        raise ValueError("Invalid alias format")

    @field_validator("human_name")
    @classmethod
    def _validate_human_name(cls, v: str) -> str:
        cleaned = (v or "").strip()
        if cleaned and not is_valid_human_name(cleaned):
            raise ValueError("Invalid human_name format")
        return cleaned

    @field_validator("role")
    @classmethod
    def _validate_role(cls, v: str) -> str:
        # A blank role falls back to the generic "agent" role.
        normalized = normalize_role((v or "").strip()) or "agent"
        if not is_valid_role(normalized):
            raise ValueError("Invalid role format")
        return normalized

    @field_validator("hostname")
    @classmethod
    def _validate_hostname(cls, v: str) -> str:
        cleaned = (v or "").strip()
        # ord("\x00") == 0, so the control-character scan covers null bytes too.
        if cleaned and any(ord(ch) < 32 for ch in cleaned):
            raise ValueError(
                "hostname contains invalid characters (null bytes or control characters)"
            )
        return cleaned

    @field_validator("workspace_path")
    @classmethod
    def _validate_workspace_path(cls, v: str) -> str:
        cleaned = (v or "").strip()
        # Tabs and newlines are tolerated; every other control character is rejected.
        if cleaned and any(ord(ch) < 32 and ch not in "\t\n" for ch in cleaned):
            raise ValueError(
                "workspace_path contains invalid characters (null bytes or control characters)"
            )
        return cleaned
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class InitResponse(BaseModel):
    """Response payload for POST /v1/init.

    Identity fields are always present; repo/workspace fields are populated
    only when the request included a `repo_origin`.
    """

    status: str = "ok"
    # Server-side timestamp of this init call (ISO-8601, UTC).
    created_at: str
    # Freshly minted API key for the agent; shown only in this response.
    api_key: str
    project_id: str
    project_slug: str
    agent_id: str
    # Repo/workspace extras — None for identity-only inits (no repo_origin given).
    repo_id: str | None = None
    canonical_origin: str | None = None
    # NOTE: workspace_id mirrors agent_id in the v1 mapping (see the route docstring).
    workspace_id: str | None = None
    alias: str
    # True when the agent identity was newly created (vs. re-keyed).
    created: bool = False
    # True when the workspace row was newly inserted (vs. updated/revived).
    workspace_created: bool = False
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
async def _infer_project_slug_from_repo(
    db_infra: DatabaseInfra, *, canonical_origin: str
) -> str | None:
    """Resolve the slug of the project owning a repo with this canonical origin.

    Returns None when no live (non-soft-deleted) repo/project pair matches.
    """
    lookup_sql = """
        SELECT p.slug
        FROM {{tables.repos}} r
        JOIN {{tables.projects}} p ON r.project_id = p.id AND p.deleted_at IS NULL
        WHERE r.canonical_origin = $1 AND r.deleted_at IS NULL
        """
    server_db = db_infra.get_manager("server")
    row = await server_db.fetch_one(lookup_sql, canonical_origin)
    if not row:
        return None
    # Treat a blank stored slug the same as "not found".
    found_slug = (row.get("slug") or "").strip()
    return found_slug or None
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
async def _suggest_name_prefix_for_project(db_infra: DatabaseInfra, *, project_id: str) -> str:
    """Pick the first unused alias prefix for a project.

    A prefix is a classic name, optionally suffixed "-NN" (01..99). An existing
    alias like "ada-02-impl" occupies "ada-02"; "ada-impl" occupies "ada".
    Raises HTTPException 409 when every candidate is taken.
    """
    aweb_db = db_infra.get_manager("aweb")
    rows = await aweb_db.fetch_all(
        """
        SELECT alias
        FROM {{tables.agents}}
        WHERE project_id = $1 AND deleted_at IS NULL
        ORDER BY alias
        """,
        UUID(project_id),
    )

    # Collect the prefix each live alias occupies (lowercased).
    taken: set[str] = set()
    for record in rows:
        existing_alias = (record.get("alias") or "").strip()
        if not existing_alias:
            continue
        pieces = existing_alias.split("-")
        if len(pieces) >= 2 and pieces[1].isdigit():
            occupied = f"{pieces[0]}-{pieces[1]}".lower()
        else:
            occupied = pieces[0].lower()
        if occupied:
            taken.add(occupied)

    # Bare classic names first, then numbered variants 01..99 in order.
    bare = next((name for name in CLASSIC_NAMES if name not in taken), None)
    if bare is not None:
        return bare

    numbered = (
        f"{name}-{num:02d}" for num in range(1, 100) for name in CLASSIC_NAMES
    )
    for candidate in numbered:
        if candidate not in taken:
            return candidate

    raise HTTPException(
        status_code=409,
        detail=f"All name prefixes are taken (tried {len(CLASSIC_NAMES)} names × 100 variants).",
    )
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
@router.post("", response_model=InitResponse)
async def init(
    request: Request,
    payload: InitRequest,
    db_infra: DatabaseInfra = Depends(get_db_infra),
    redis=Depends(get_redis),
) -> InitResponse:
    """Bootstrap identity and optionally register a BeadHub workspace.

    - Always mints a new `aw_sk_*` API key for the created/ensured agent.
    - If `repo_origin` is provided, also ensures the repo and creates a BeadHub workspace
      using `workspace_id = agent_id` (v1 mapping).
    """
    # Unauthenticated endpoint: rate-limit the caller before any DB work.
    await enforce_init_rate_limit(request, redis)

    canonical_origin: str | None = None
    if payload.repo_origin is not None:
        canonical_origin = canonicalize_git_url(payload.repo_origin)

    # Resolve the project slug: an explicit slug wins; otherwise infer it from
    # an already-registered repo. With neither, the request is rejected.
    project_slug = payload.project_slug
    if not project_slug:
        if canonical_origin is None:
            raise HTTPException(status_code=422, detail="project_slug is required")
        inferred = await _infer_project_slug_from_repo(db_infra, canonical_origin=canonical_origin)
        if inferred is None:
            raise HTTPException(status_code=422, detail="project_not_found: repo not registered")
        project_slug = inferred

    # Auto-generate an alias ("<prefix>-<role-suffix>") only for repo-based
    # inits that did not supply one. The project must exist first so prefix
    # uniqueness can be checked against its current agents.
    alias = (payload.alias or "").strip() or None
    if alias is None and canonical_origin is not None:
        from aweb.bootstrap import ensure_project

        ensured = await ensure_project(
            db_infra, project_slug=project_slug, project_name=payload.project_name or project_slug
        )
        prefix = await _suggest_name_prefix_for_project(db_infra, project_id=ensured.project_id)
        alias = f"{prefix}-{role_to_alias_prefix(payload.role)}"

    # Create (or re-key) the aweb agent identity; this mints the API key.
    identity: BootstrapIdentityResult = await bootstrap_identity(
        db_infra,
        project_slug=project_slug,
        project_name=payload.project_name or project_slug,
        alias=alias,
        human_name=payload.human_name or "",
        agent_type=payload.agent_type,
    )

    # Identity-only init: no repo supplied, so no workspace is registered.
    if canonical_origin is None:
        return InitResponse(
            created_at=_now_iso(),
            api_key=identity.api_key,
            project_id=identity.project_id,
            project_slug=identity.project_slug,
            agent_id=identity.agent_id,
            alias=identity.alias,
            created=identity.created,
        )

    server_db = db_infra.get_manager("server")

    repo_name = extract_repo_name(canonical_origin)
    workspace_created = False

    # Repo + workspace registration happens atomically.
    async with server_db.transaction() as tx:
        # Mirror the aweb project into the server DB. Idempotent; also revives
        # a previously soft-deleted project (deleted_at = NULL).
        await tx.execute(
            """
            INSERT INTO {{tables.projects}} (id, tenant_id, slug, name, deleted_at)
            VALUES ($1, NULL, $2, $3, NULL)
            ON CONFLICT (id)
            DO UPDATE SET slug = EXCLUDED.slug, name = EXCLUDED.name, deleted_at = NULL
            """,
            UUID(identity.project_id),
            identity.project_slug,
            identity.project_name or None,
        )

        # Upsert the repo keyed by (project, canonical origin); keeps the
        # latest raw origin URL and revives soft-deleted rows.
        repo = await tx.fetch_one(
            """
            INSERT INTO {{tables.repos}} (project_id, origin_url, canonical_origin, name)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (project_id, canonical_origin)
            DO UPDATE SET origin_url = EXCLUDED.origin_url, deleted_at = NULL
            RETURNING id
            """,
            UUID(identity.project_id),
            payload.repo_origin,
            canonical_origin,
            repo_name,
        )
        repo_id = str(repo["id"])

        # v1 mapping: workspace_id == agent_id. Look for this agent's existing
        # workspace (soft-deleted rows included, hence no deleted_at filter).
        existing = await tx.fetch_one(
            """
            SELECT
                w.workspace_id,
                w.repo_id,
                w.alias,
                r.canonical_origin AS existing_canonical_origin
            FROM {{tables.workspaces}} w
            LEFT JOIN {{tables.repos}} r ON w.repo_id = r.id
            WHERE w.workspace_id = $1 AND w.project_id = $2
            """,
            UUID(identity.agent_id),
            UUID(identity.project_id),
        )

        if existing is None:
            workspace_created = True
            await tx.execute(
                """
                INSERT INTO {{tables.workspaces}}
                    (workspace_id, project_id, repo_id, alias, human_name, role, hostname, workspace_path)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
                """,
                UUID(identity.agent_id),
                UUID(identity.project_id),
                UUID(repo_id),
                identity.alias,
                payload.human_name or "",
                payload.role,
                payload.hostname or None,
                payload.workspace_path or None,
            )
        else:
            existing_repo_id = existing.get("repo_id")
            existing_canonical = existing.get("existing_canonical_origin")
            # A workspace is pinned to one repo; re-initializing the same agent
            # against a different repo is a hard conflict, not a silent re-point.
            if existing_repo_id is None or str(existing_repo_id) != repo_id:
                raise HTTPException(
                    status_code=409,
                    detail=(
                        "workspace_repo_mismatch: "
                        f"alias '{identity.alias}' (workspace_id={identity.agent_id}) is already registered "
                        f"for repo '{existing_canonical or existing_repo_id}'. "
                        f"Cannot initialize the same agent for repo '{canonical_origin}'. "
                        "Choose a different alias (new agent/worktree) or initialize from the original repo."
                    ),
                )

            # Same repo: refresh mutable metadata and revive the workspace.
            await tx.execute(
                """
                UPDATE {{tables.workspaces}}
                SET repo_id = $3,
                    alias = $4,
                    human_name = $5,
                    role = $6,
                    hostname = $7,
                    workspace_path = $8,
                    deleted_at = NULL
                WHERE workspace_id = $1 AND project_id = $2
                """,
                UUID(identity.agent_id),
                UUID(identity.project_id),
                UUID(repo_id),
                identity.alias,
                payload.human_name or "",
                payload.role,
                payload.hostname or None,
                payload.workspace_path or None,
            )

    return InitResponse(
        created_at=_now_iso(),
        api_key=identity.api_key,
        project_id=identity.project_id,
        project_slug=identity.project_slug,
        agent_id=identity.agent_id,
        repo_id=repo_id,
        canonical_origin=canonical_origin,
        workspace_id=identity.agent_id,
        alias=identity.alias,
        created=identity.created,
        workspace_created=workspace_created,
    )
|
beadhub/routes/mcp.py
ADDED
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from typing import Any
|
|
6
|
+
from uuid import UUID
|
|
7
|
+
|
|
8
|
+
from fastapi import APIRouter, Depends, HTTPException, Request
|
|
9
|
+
from redis.asyncio import Redis
|
|
10
|
+
|
|
11
|
+
from beadhub.auth import verify_workspace_access
|
|
12
|
+
|
|
13
|
+
from ..db import DatabaseInfra, get_db_infra
|
|
14
|
+
from ..presence import (
|
|
15
|
+
get_workspace_ids_by_project_id,
|
|
16
|
+
list_agent_presences_by_workspace_ids,
|
|
17
|
+
update_agent_presence,
|
|
18
|
+
)
|
|
19
|
+
from ..redis_client import get_redis
|
|
20
|
+
from .beads import beads_ready as http_beads_ready
|
|
21
|
+
from .beads import get_issue_by_bead_id as http_get_issue
|
|
22
|
+
from .escalations import (
|
|
23
|
+
CreateEscalationRequest,
|
|
24
|
+
CreateEscalationResponse,
|
|
25
|
+
create_escalation,
|
|
26
|
+
)
|
|
27
|
+
from .escalations import get_escalation as http_get_escalation
|
|
28
|
+
from .status import status as http_status
|
|
29
|
+
from .subscriptions import SubscribeRequest
|
|
30
|
+
from .subscriptions import list_subscriptions as http_list_subscriptions
|
|
31
|
+
from .subscriptions import subscribe as http_subscribe
|
|
32
|
+
from .subscriptions import unsubscribe as http_unsubscribe
|
|
33
|
+
|
|
34
|
+
logger = logging.getLogger(__name__)
|
|
35
|
+
|
|
36
|
+
router = APIRouter(tags=["mcp"])
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _rpc_error(id_value: Any, code: int, message: str, data: Any | None = None) -> dict[str, Any]:
|
|
40
|
+
error: dict[str, Any] = {"code": code, "message": message}
|
|
41
|
+
if data is not None:
|
|
42
|
+
error["data"] = data
|
|
43
|
+
return {"jsonrpc": "2.0", "id": id_value, "error": error}
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _rpc_result(id_value: Any, payload: Any) -> dict[str, Any]:
    """Wrap a payload as a JSON-RPC 2.0 success response.

    MCP clients expect text content carrying a JSON string payload.
    """
    serialized = json.dumps(payload)
    content = [{"type": "text", "text": serialized}]
    return {"jsonrpc": "2.0", "id": id_value, "result": {"content": content}}
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@router.post("/mcp")
|
|
56
|
+
async def mcp_entry(
|
|
57
|
+
request: Request,
|
|
58
|
+
payload: dict[str, Any],
|
|
59
|
+
redis: Redis = Depends(get_redis),
|
|
60
|
+
db_infra: DatabaseInfra = Depends(get_db_infra),
|
|
61
|
+
) -> dict[str, Any]:
|
|
62
|
+
"""JSON-RPC 2.0 entrypoint for BeadHub MCP tools.
|
|
63
|
+
|
|
64
|
+
Clean-slate split:
|
|
65
|
+
- mail/chat/locks live in aweb and are not exposed here
|
|
66
|
+
- this surface is bead/workspace specific (ready issues, subscriptions, status, escalations)
|
|
67
|
+
"""
|
|
68
|
+
rpc_id = payload.get("id")
|
|
69
|
+
if payload.get("jsonrpc") != "2.0":
|
|
70
|
+
return _rpc_error(rpc_id, -32600, "Invalid jsonrpc version")
|
|
71
|
+
if payload.get("method") != "tools/call":
|
|
72
|
+
return _rpc_error(rpc_id, -32601, "Method not found")
|
|
73
|
+
|
|
74
|
+
params = payload.get("params") or {}
|
|
75
|
+
name = params.get("name")
|
|
76
|
+
arguments = params.get("arguments") or {}
|
|
77
|
+
if not isinstance(name, str):
|
|
78
|
+
return _rpc_error(rpc_id, -32602, "Tool name must be a string")
|
|
79
|
+
if not isinstance(arguments, dict):
|
|
80
|
+
return _rpc_error(rpc_id, -32602, "Tool arguments must be an object")
|
|
81
|
+
|
|
82
|
+
try:
|
|
83
|
+
if name == "register_agent":
|
|
84
|
+
result = await _tool_register_agent(request, redis, db_infra, arguments)
|
|
85
|
+
elif name == "list_agents":
|
|
86
|
+
result = await _tool_list_agents(request, redis, db_infra, arguments)
|
|
87
|
+
elif name == "status":
|
|
88
|
+
result = await _tool_status(request, redis, db_infra, arguments)
|
|
89
|
+
elif name == "get_ready_issues":
|
|
90
|
+
result = await _tool_get_ready_issues(request, db_infra, arguments)
|
|
91
|
+
elif name == "get_issue":
|
|
92
|
+
result = await _tool_get_issue(request, db_infra, arguments)
|
|
93
|
+
elif name == "subscribe_to_bead":
|
|
94
|
+
result = await _tool_subscribe_to_bead(request, db_infra, arguments)
|
|
95
|
+
elif name == "list_subscriptions":
|
|
96
|
+
result = await _tool_list_subscriptions(request, db_infra, arguments)
|
|
97
|
+
elif name == "unsubscribe":
|
|
98
|
+
result = await _tool_unsubscribe(request, db_infra, arguments)
|
|
99
|
+
elif name == "escalate":
|
|
100
|
+
result = await _tool_escalate(request, redis, db_infra, arguments)
|
|
101
|
+
elif name == "get_escalation":
|
|
102
|
+
result = await _tool_get_escalation(request, db_infra, arguments)
|
|
103
|
+
else:
|
|
104
|
+
return _rpc_error(rpc_id, -32601, f"Unknown tool: {name}")
|
|
105
|
+
except HTTPException as exc:
|
|
106
|
+
return _rpc_error(rpc_id, exc.status_code, str(exc.detail))
|
|
107
|
+
except Exception:
|
|
108
|
+
logger.exception("MCP tool call failed: %s", name)
|
|
109
|
+
return _rpc_error(rpc_id, -32000, "Internal error")
|
|
110
|
+
|
|
111
|
+
return _rpc_result(rpc_id, result)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
async def _tool_register_agent(
    request: Request,
    redis: Redis,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Record/refresh agent presence for a workspace (MCP `register_agent`)."""

    def _text(key: str) -> str:
        # Coerce any JSON value to a trimmed string; None/missing become "".
        return str(args.get(key) or "").strip()

    workspace_id = _text("workspace_id")
    alias = _text("alias")
    human_name = _text("human_name")
    program = _text("program") or None
    model = _text("model") or None
    role = _text("role") or None

    if not (workspace_id and alias):
        raise HTTPException(status_code=422, detail="workspace_id and alias are required")

    project_id = await verify_workspace_access(request, workspace_id, db_infra)
    await update_agent_presence(
        redis,
        workspace_id=workspace_id,
        alias=alias,
        human_name=human_name,
        project_id=project_id,
        project_slug=None,
        repo_id=None,
        program=program,
        model=model,
        current_branch=None,
        role=role,
        ttl_seconds=1800,
    )
    return {"ok": True}
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
async def _tool_list_agents(
    request: Request,
    redis: Redis,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """List live agent presences across every workspace in the caller's project."""
    workspace_id = str(args.get("workspace_id") or "").strip()
    if not workspace_id:
        raise HTTPException(status_code=422, detail="workspace_id is required")
    project_id = await verify_workspace_access(request, workspace_id, db_infra)
    sibling_ids = await get_workspace_ids_by_project_id(redis, project_id)
    presences = await list_agent_presences_by_workspace_ids(redis, sibling_ids)
    return {"agents": presences}
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
async def get_workspace_project_id_or_404(db_infra: DatabaseInfra, workspace_id: str) -> str:
    """Resolve a workspace's owning project id.

    Raises:
        HTTPException 404: unknown workspace, or a workspace_id that is not a
            valid UUID (indistinguishable from "not found" to callers).
        HTTPException 410: workspace exists but was soft-deleted.
    """
    # Consistency fix: the other workspace lookups in this module bind a UUID
    # parameter; passing the raw string here let a malformed id surface as a
    # driver encoding error (HTTP 500) instead of a clean 404.
    try:
        workspace_uuid = UUID(workspace_id)
    except ValueError:
        raise HTTPException(status_code=404, detail="Workspace not found") from None

    server_db = db_infra.get_manager("server")
    row = await server_db.fetch_one(
        "SELECT project_id, deleted_at FROM {{tables.workspaces}} WHERE workspace_id = $1",
        workspace_uuid,
    )
    if not row:
        raise HTTPException(status_code=404, detail="Workspace not found")
    if row.get("deleted_at") is not None:
        raise HTTPException(status_code=410, detail="Workspace was deleted")
    return str(row["project_id"])
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
async def _tool_status(
    request: Request,
    redis: Redis,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Proxy the HTTP workspace-status endpoint for the MCP `status` tool."""
    workspace_id = str(args.get("workspace_id") or "").strip()
    if not workspace_id:
        raise HTTPException(status_code=422, detail="workspace_id is required")
    # Access check only; the HTTP handler produces the response body.
    await verify_workspace_access(request, workspace_id, db_infra)
    return await http_status(
        request,
        workspace_id=workspace_id,
        redis=redis,
        db_infra=db_infra,
    )
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
async def _tool_get_ready_issues(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Return ready-to-work issues for a workspace (MCP `get_ready_issues`).

    Optional arguments: `repo`, `branch` (filters) and `limit` (default 10).

    Raises:
        HTTPException 422: missing workspace_id or non-integer limit.
    """
    workspace_id = str(args.get("workspace_id") or "").strip()
    repo = str(args.get("repo") or "").strip() or None
    branch = str(args.get("branch") or "").strip() or None
    limit_raw = args.get("limit")
    limit = 10
    if limit_raw is not None:
        try:
            limit = int(limit_raw)
        except (TypeError, ValueError) as exc:
            # Chain the cause so the original conversion failure isn't lost.
            raise HTTPException(status_code=422, detail="limit must be an integer") from exc
    if not workspace_id:
        raise HTTPException(status_code=422, detail="workspace_id is required")
    await verify_workspace_access(request, workspace_id, db_infra)
    return await http_beads_ready(
        request,
        workspace_id=workspace_id,
        repo=repo,
        branch=branch,
        limit=limit,
        db_infra=db_infra,
    )
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
async def _tool_get_issue(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Fetch a single issue by bead id (MCP `get_issue`).

    Project scoping is already enforced by the underlying get_issue endpoint.
    """
    bead_id = str(args.get("bead_id") or "").strip()
    if bead_id:
        return await http_get_issue(bead_id=bead_id, request=request, db_infra=db_infra)
    raise HTTPException(status_code=422, detail="bead_id is required")
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
async def _tool_subscribe_to_bead(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Subscribe the caller's workspace to events on a bead."""
    workspace_id = str(args.get("workspace_id") or "").strip()
    bead_id = str(args.get("bead_id") or "").strip()
    if not (workspace_id and bead_id):
        raise HTTPException(status_code=422, detail="workspace_id and bead_id are required")

    project_id = await verify_workspace_access(request, workspace_id, db_infra)
    alias = await _get_workspace_alias_or_403(db_infra, project_id, workspace_id)

    fields: dict[str, Any] = {
        "workspace_id": workspace_id,
        "alias": alias,
        "bead_id": bead_id,
        "repo": args.get("repo"),
    }
    # Only pass event_types through when it is a proper list; anything else
    # falls back to the SubscribeRequest default.
    event_types = args.get("event_types")
    if isinstance(event_types, list):
        fields["event_types"] = event_types

    sub_request = SubscribeRequest.model_validate(fields)
    result = await http_subscribe(payload=sub_request, request=request, db_infra=db_infra)
    return result.model_dump()
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
async def _tool_list_subscriptions(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """List the caller's bead subscriptions (MCP `list_subscriptions`)."""
    workspace_id = str(args.get("workspace_id") or "").strip()
    if not workspace_id:
        raise HTTPException(status_code=422, detail="workspace_id is required")
    project_id = await verify_workspace_access(request, workspace_id, db_infra)
    alias = await _get_workspace_alias_or_403(db_infra, project_id, workspace_id)
    listing = await http_list_subscriptions(
        request=request,
        workspace_id=workspace_id,
        alias=alias,
        db_infra=db_infra,
    )
    return listing.model_dump()
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
async def _tool_unsubscribe(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Remove one of the caller's bead subscriptions (MCP `unsubscribe`)."""
    workspace_id = str(args.get("workspace_id") or "").strip()
    subscription_id = str(args.get("subscription_id") or "").strip()
    if not (workspace_id and subscription_id):
        raise HTTPException(status_code=422, detail="workspace_id and subscription_id are required")
    project_id = await verify_workspace_access(request, workspace_id, db_infra)
    alias = await _get_workspace_alias_or_403(db_infra, project_id, workspace_id)
    removal = await http_unsubscribe(
        request=request,
        subscription_id=subscription_id,
        workspace_id=workspace_id,
        alias=alias,
        db_infra=db_infra,
    )
    return removal.model_dump()
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
async def _get_workspace_alias_or_403(
    db_infra: DatabaseInfra, project_id: str, workspace_id: str
) -> str:
    """Return the workspace's alias, or 403 if it isn't a live workspace of this project."""
    alias_sql = """
        SELECT alias
        FROM {{tables.workspaces}}
        WHERE workspace_id = $1 AND project_id = $2 AND deleted_at IS NULL
        """
    server_db = db_infra.get_manager("server")
    row = await server_db.fetch_one(alias_sql, UUID(workspace_id), UUID(project_id))
    if row:
        return row["alias"]
    raise HTTPException(
        status_code=403, detail="Workspace not found or does not belong to your project"
    )
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
async def _tool_escalate(
    request: Request,
    redis: Redis,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Create an escalation (MCP `escalate`).

    Argument validation and authorization are delegated to the request model
    and the HTTP create_escalation handler.
    """
    create_payload = CreateEscalationRequest.model_validate(args)
    created: CreateEscalationResponse = await create_escalation(
        request=request, payload=create_payload, redis=redis, db_infra=db_infra
    )
    return created.model_dump()
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
async def _tool_get_escalation(
    request: Request,
    db_infra: DatabaseInfra,
    args: dict[str, Any],
) -> dict[str, Any]:
    """Fetch one escalation by id (MCP `get_escalation`)."""
    escalation_id = str(args.get("escalation_id") or "").strip()
    if escalation_id:
        return await http_get_escalation(
            escalation_id=escalation_id, request=request, db_infra=db_infra
        )
    raise HTTPException(status_code=422, detail="escalation_id is required")
|