beadhub-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beadhub/__init__.py +12 -0
- beadhub/api.py +260 -0
- beadhub/auth.py +101 -0
- beadhub/aweb_context.py +65 -0
- beadhub/aweb_introspection.py +70 -0
- beadhub/beads_sync.py +514 -0
- beadhub/cli.py +330 -0
- beadhub/config.py +65 -0
- beadhub/db.py +129 -0
- beadhub/defaults/invariants/01-tracking-bdh-only.md +11 -0
- beadhub/defaults/invariants/02-communication-mail-first.md +36 -0
- beadhub/defaults/invariants/03-communication-chat.md +60 -0
- beadhub/defaults/invariants/04-identity-no-impersonation.md +17 -0
- beadhub/defaults/invariants/05-collaborate.md +12 -0
- beadhub/defaults/roles/backend.md +55 -0
- beadhub/defaults/roles/coordinator.md +44 -0
- beadhub/defaults/roles/frontend.md +77 -0
- beadhub/defaults/roles/implementer.md +73 -0
- beadhub/defaults/roles/reviewer.md +56 -0
- beadhub/defaults/roles/startup-expert.md +93 -0
- beadhub/defaults.py +262 -0
- beadhub/events.py +704 -0
- beadhub/internal_auth.py +121 -0
- beadhub/jsonl.py +68 -0
- beadhub/logging.py +62 -0
- beadhub/migrations/beads/001_initial.sql +70 -0
- beadhub/migrations/beads/002_search_indexes.sql +20 -0
- beadhub/migrations/server/001_initial.sql +279 -0
- beadhub/names.py +33 -0
- beadhub/notifications.py +275 -0
- beadhub/pagination.py +125 -0
- beadhub/presence.py +495 -0
- beadhub/rate_limit.py +152 -0
- beadhub/redis_client.py +11 -0
- beadhub/roles.py +35 -0
- beadhub/routes/__init__.py +1 -0
- beadhub/routes/agents.py +303 -0
- beadhub/routes/bdh.py +655 -0
- beadhub/routes/beads.py +778 -0
- beadhub/routes/claims.py +141 -0
- beadhub/routes/escalations.py +471 -0
- beadhub/routes/init.py +348 -0
- beadhub/routes/mcp.py +338 -0
- beadhub/routes/policies.py +833 -0
- beadhub/routes/repos.py +538 -0
- beadhub/routes/status.py +568 -0
- beadhub/routes/subscriptions.py +362 -0
- beadhub/routes/workspaces.py +1642 -0
- beadhub/workspace_config.py +202 -0
- beadhub-0.1.0.dist-info/METADATA +254 -0
- beadhub-0.1.0.dist-info/RECORD +54 -0
- beadhub-0.1.0.dist-info/WHEEL +4 -0
- beadhub-0.1.0.dist-info/entry_points.txt +2 -0
- beadhub-0.1.0.dist-info/licenses/LICENSE +21 -0
beadhub/routes/repos.py
ADDED
@@ -0,0 +1,538 @@
"""BeadHub repos endpoints.

Provides repository registration for OSS mode. Used by `bdh init` to register
git repos within a project and obtain a repo_id (UUID).
"""

from __future__ import annotations

import logging
import re
import uuid as uuid_module
from datetime import datetime
from typing import Optional
from urllib.parse import urlparse
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel, Field, field_validator
from redis.asyncio import Redis

from ..db import DatabaseInfra, get_db_infra
from ..pagination import encode_cursor, validate_pagination_params
from ..presence import clear_workspace_presence
from ..redis_client import get_redis

logger = logging.getLogger(__name__)


router = APIRouter(prefix="/v1/repos", tags=["repos"])


def canonicalize_git_url(origin_url: str) -> str:
    """
    Normalize a git origin URL to canonical form.

    Converts various git URL formats to a consistent canonical form:
    - git@github.com:org/repo.git -> github.com/org/repo
    - https://github.com/org/repo.git -> github.com/org/repo
    - ssh://git@github.com:22/org/repo.git -> github.com/org/repo

    Args:
        origin_url: Git origin URL in any format

    Returns:
        Canonical form: host/path (e.g., github.com/org/repo)

    Raises:
        ValueError: If the URL cannot be parsed
    """
    if not origin_url or not origin_url.strip():
        raise ValueError("Empty origin URL")

    url = origin_url.strip()

    # Handle SSH format: git@host:path
    ssh_match = re.match(r"^git@([^:]+):(.+)$", url)
    if ssh_match:
        host = ssh_match.group(1)
        path = ssh_match.group(2)
    else:
        # Handle URL format (https://, http://, ssh://)
        parsed = urlparse(url)
        if not parsed.scheme or not parsed.netloc:
            raise ValueError(f"Invalid git URL: {origin_url}")

        host = parsed.hostname
        if not host:
            raise ValueError(f"Invalid git URL: {origin_url}")

        # For ssh:// format with user@host:port, parsed.path starts with /
        path = parsed.path.lstrip("/")

    # Remove .git extension
    if path.endswith(".git"):
        path = path[:-4]

    # Remove trailing slash
    path = path.rstrip("/")

    if not path:
        raise ValueError(f"Invalid git URL (no path): {origin_url}")

    return f"{host}/{path}"


def extract_repo_name(canonical_origin: str) -> str:
    """
    Extract repo name from canonical origin.

    Args:
        canonical_origin: Canonical origin (e.g., github.com/org/repo)

    Returns:
        Repo name (last path component, e.g., repo)
    """
    return canonical_origin.rsplit("/", 1)[-1]


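# A quick sanity sketch of the two helpers above (doctest-style, hypothetical
# inputs; illustrative only, not part of the module's tests):
#
#   >>> canonicalize_git_url("git@github.com:acme/widgets.git")
#   'github.com/acme/widgets'
#   >>> canonicalize_git_url("https://github.com/acme/widgets.git")
#   'github.com/acme/widgets'
#   >>> canonicalize_git_url("ssh://git@github.com:22/acme/widgets.git")
#   'github.com/acme/widgets'
#   >>> extract_repo_name("github.com/acme/widgets")
#   'widgets'

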
class RepoLookupRequest(BaseModel):
    """Request body for POST /v1/repos/lookup."""

    origin_url: str = Field(..., min_length=1, max_length=2048)

    @field_validator("origin_url")
    @classmethod
    def validate_origin_url(cls, v: str) -> str:
        """Validate origin_url can be canonicalized."""
        try:
            canonicalize_git_url(v)
        except ValueError as e:
            raise ValueError(f"Invalid origin_url: {e}")
        return v


class RepoLookupResponse(BaseModel):
    """Response for POST /v1/repos/lookup."""

    repo_id: str
    project_id: str
    project_slug: str
    canonical_origin: str
    name: str


class RepoLookupCandidate(BaseModel):
    """A candidate repo/project pair when lookup is ambiguous."""

    repo_id: str
    project_id: str
    project_slug: str


@router.post("/lookup")
async def lookup_repo(
    payload: RepoLookupRequest,
    db: DatabaseInfra = Depends(get_db_infra),
) -> RepoLookupResponse:
    """
    Look up a repo by origin URL. Returns the repo and its project if found.

    This is used by `bdh init` to detect if a repo is already registered,
    allowing automatic project detection.

    Returns:
        - 200 with repo info if exactly one match
        - 404 if no matches
        - 409 with candidates if multiple projects have the same repo
    """
    server_db = db.get_manager("server")

    canonical_origin = canonicalize_git_url(payload.origin_url)

    # Fetch ALL matching repos (not just the first one)
    results = await server_db.fetch_all(
        """
        SELECT r.id as repo_id, r.canonical_origin, r.name,
               p.id as project_id, p.slug as project_slug
        FROM {{tables.repos}} r
        JOIN {{tables.projects}} p ON r.project_id = p.id AND p.deleted_at IS NULL
        WHERE r.canonical_origin = $1 AND r.deleted_at IS NULL
        ORDER BY p.slug
        """,
        canonical_origin,
    )

    if not results:
        raise HTTPException(
            status_code=404,
            detail=f"Repo not found: {canonical_origin}",
        )

    if len(results) == 1:
        result = results[0]
        return RepoLookupResponse(
            repo_id=str(result["repo_id"]),
            project_id=str(result["project_id"]),
            project_slug=result["project_slug"],
            canonical_origin=result["canonical_origin"],
            name=result["name"],
        )

    # Multiple matches - return 409 with candidates
    candidates = [
        RepoLookupCandidate(
            repo_id=str(r["repo_id"]),
            project_id=str(r["project_id"]),
            project_slug=r["project_slug"],
        )
        for r in results
    ]
    project_slugs = [c.project_slug for c in candidates]

    raise HTTPException(
        status_code=409,
        detail={
            "message": f"Repo {canonical_origin} exists in multiple projects: {', '.join(project_slugs)}. "
            "Choose a project slug and run 'bdh :init --project <slug>' (or authenticate with the correct project API key).",
            "canonical_origin": canonical_origin,
            "candidates": [c.model_dump() for c in candidates],
        },
    )


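# Sketch of how a client such as `bdh init` might branch on the three lookup
# outcomes documented above (hypothetical client code; `base_url` and `origin`
# are assumed, and httpx is used purely for illustration):
#
#   resp = httpx.post(f"{base_url}/v1/repos/lookup", json={"origin_url": origin})
#   if resp.status_code == 200:
#       slug = resp.json()["project_slug"]                # auto-detected project
#   elif resp.status_code == 404:
#       pass                                              # not registered yet
#   elif resp.status_code == 409:
#       choices = resp.json()["detail"]["candidates"]     # user must pick a project

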
class RepoEnsureRequest(BaseModel):
    """Request body for POST /v1/repos/ensure."""

    project_id: str = Field(..., min_length=36, max_length=36)
    origin_url: str = Field(..., min_length=1, max_length=2048)

    @field_validator("project_id")
    @classmethod
    def validate_project_id(cls, v: str) -> str:
        """Validate project_id is a valid UUID."""
        try:
            uuid_module.UUID(v)
        except ValueError:
            raise ValueError("Invalid project_id: must be a valid UUID")
        return v

    @field_validator("origin_url")
    @classmethod
    def validate_origin_url(cls, v: str) -> str:
        """Validate origin_url can be canonicalized."""
        try:
            canonicalize_git_url(v)
        except ValueError as e:
            raise ValueError(f"Invalid origin_url: {e}")
        return v


class RepoEnsureResponse(BaseModel):
    """Response for POST /v1/repos/ensure."""

    repo_id: str
    canonical_origin: str
    name: str
    created: bool


@router.post("/ensure")
async def ensure_repo(
    payload: RepoEnsureRequest,
    db: DatabaseInfra = Depends(get_db_infra),
) -> RepoEnsureResponse:
    """
    Get or create a repo by origin URL. Used by `bdh init` in OSS mode.

    If a repo with the same canonical origin exists in the project, returns it
    with created=false (and updates the origin_url to the new value).
    If it doesn't exist, creates it and returns with created=true.

    The canonical_origin is computed by normalizing the origin_url. Different
    URL formats (SSH vs HTTPS) that refer to the same repo will match.
    """
    server_db = db.get_manager("server")

    # First verify the project exists and is not soft-deleted
    project = await server_db.fetch_one(
        "SELECT id FROM {{tables.projects}} WHERE id = $1 AND deleted_at IS NULL",
        payload.project_id,
    )
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    canonical_origin = canonicalize_git_url(payload.origin_url)
    name = extract_repo_name(canonical_origin)

    # Use INSERT ON CONFLICT DO UPDATE to handle race conditions.
    # The (xmax = 0) check detects INSERT vs UPDATE: xmax is 0 for new rows,
    # non-zero when updated (PostgreSQL stores the updating transaction ID there).
    # Also clear deleted_at to undelete soft-deleted repos when re-registered.
    result = await server_db.fetch_one(
        """
        INSERT INTO {{tables.repos}} (project_id, origin_url, canonical_origin, name)
        VALUES ($1, $2, $3, $4)
        ON CONFLICT (project_id, canonical_origin)
        DO UPDATE SET origin_url = EXCLUDED.origin_url, deleted_at = NULL
        RETURNING id, canonical_origin, name, (xmax = 0) AS created
        """,
        payload.project_id,
        payload.origin_url,
        canonical_origin,
        name,
    )

    created = result["created"]
    if created:
        logger.info(
            "Repo created: project=%s canonical=%s id=%s",
            payload.project_id,
            canonical_origin,
            result["id"],
        )
    else:
        logger.info(
            "Repo found: project=%s canonical=%s id=%s",
            payload.project_id,
            canonical_origin,
            result["id"],
        )

    return RepoEnsureResponse(
        repo_id=str(result["id"]),
        canonical_origin=result["canonical_origin"],
        name=result["name"],
        created=created,
    )


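# Illustration of the upsert semantics above (hypothetical request sequence;
# `base_url` and `project_id` are assumed, httpx usage is illustrative):
#
#   httpx.post(f"{base_url}/v1/repos/ensure", json={
#       "project_id": project_id,
#       "origin_url": "git@github.com:acme/widgets.git",
#   })                                  # -> created=true, new row inserted
#
#   httpx.post(f"{base_url}/v1/repos/ensure", json={
#       "project_id": project_id,
#       "origin_url": "https://github.com/acme/widgets.git",
#   })                                  # -> created=false: same canonical_origin,
#                                       #    origin_url updated to the HTTPS form

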
class RepoSummary(BaseModel):
    """Summary of a repo for list view."""

    id: str
    project_id: str
    canonical_origin: str
    name: str
    created_at: datetime
    workspace_count: int


class RepoListResponse(BaseModel):
    """Response for GET /v1/repos."""

    repos: list[RepoSummary]
    has_more: bool = False
    next_cursor: Optional[str] = None


@router.get("")
async def list_repos(
    project_id: Optional[UUID] = Query(default=None, description="Filter by project ID"),
    limit: Optional[int] = Query(
        default=None,
        ge=1,
        le=200,
        description="Maximum number of repos to return (default 50, max 200)",
    ),
    cursor: Optional[str] = Query(
        default=None, description="Pagination cursor from previous response"
    ),
    db: DatabaseInfra = Depends(get_db_infra),
) -> RepoListResponse:
    """
    List repos with optional project filter and cursor-based pagination.

    Returns active (non-deleted) repos, optionally filtered by project_id.
    Each repo includes a count of active workspaces.

    Results are ordered by (created_at, id) for deterministic pagination
    that remains stable across inserts.
    """
    try:
        validated_limit, cursor_data = validate_pagination_params(limit, cursor)
    except ValueError as e:
        raise HTTPException(status_code=422, detail=str(e))

    server_db = db.get_manager("server")

    query = """
        SELECT
            r.id,
            r.project_id,
            r.canonical_origin,
            r.name,
            r.created_at,
            COUNT(w.workspace_id) FILTER (WHERE w.deleted_at IS NULL) AS workspace_count
        FROM {{tables.repos}} r
        LEFT JOIN {{tables.workspaces}} w ON w.repo_id = r.id
        WHERE r.deleted_at IS NULL
    """

    params: list = []
    param_idx = 1

    if project_id:
        query += f" AND r.project_id = ${param_idx}"
        params.append(str(project_id))
        param_idx += 1

    # Apply cursor filter (created_at, id) for deterministic pagination
    if cursor_data and "created_at" in cursor_data and "id" in cursor_data:
        try:
            cursor_created_at = datetime.fromisoformat(cursor_data["created_at"])
            cursor_id = UUID(cursor_data["id"])
        except (ValueError, TypeError) as e:
            raise HTTPException(status_code=422, detail=f"Invalid cursor: {e}")
        query += f" AND (r.created_at, r.id) > (${param_idx}, ${param_idx + 1})"
        params.extend([cursor_created_at, cursor_id])
        param_idx += 2

    query += """
        GROUP BY r.id, r.project_id, r.canonical_origin, r.name, r.created_at
        ORDER BY r.created_at, r.id
    """

    # Fetch limit + 1 to detect has_more
    query += f" LIMIT ${param_idx}"
    params.append(validated_limit + 1)

    rows = await server_db.fetch_all(query, *params)

    # Check if there are more results
    has_more = len(rows) > validated_limit
    rows = rows[:validated_limit]  # Trim to requested limit

    # Generate next_cursor if there are more results
    next_cursor = None
    if has_more and rows:
        last_row = rows[-1]
        next_cursor = encode_cursor(
            {
                "created_at": last_row["created_at"].isoformat(),
                "id": str(last_row["id"]),
            }
        )

    return RepoListResponse(
        repos=[
            RepoSummary(
                id=str(row["id"]),
                project_id=str(row["project_id"]),
                canonical_origin=row["canonical_origin"],
                name=row["name"],
                created_at=row["created_at"],
                workspace_count=row["workspace_count"],
            )
            for row in rows
        ],
        has_more=has_more,
        next_cursor=next_cursor,
    )


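# Typical consumption of the cursor pagination above (sketch; `fetch_json` is a
# hypothetical GET helper returning the parsed response body):
#
#   cursor = None
#   while True:
#       params = {"limit": 50}
#       if cursor:
#           params["cursor"] = cursor
#       page = fetch_json("/v1/repos", params=params)
#       for repo in page["repos"]:
#           ...                           # process each RepoSummary
#       if not page["has_more"]:
#           break
#       cursor = page["next_cursor"]      # opaque (created_at, id) cursor

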
class RepoDeleteResponse(BaseModel):
    """Response for DELETE /v1/repos/{id}."""

    id: str
    workspaces_deleted: int
    claims_deleted: int
    presence_cleared: int


@router.delete("/{repo_id}")
async def delete_repo(
    repo_id: UUID,
    db: DatabaseInfra = Depends(get_db_infra),
    redis: Redis = Depends(get_redis),
) -> RepoDeleteResponse:
    """
    Soft-delete a repo and cascade to workspaces.

    This operation:
    1. Sets deleted_at on the repo
    2. Soft-deletes all workspaces in the repo (sets deleted_at)
    3. Deletes all bead claims for those workspaces
    4. Clears Redis presence for those workspaces

    Returns counts of affected resources.
    """
    server_db = db.get_manager("server")

    # Verify repo exists and is not already deleted
    repo = await server_db.fetch_one(
        """
        SELECT id, project_id FROM {{tables.repos}}
        WHERE id = $1 AND deleted_at IS NULL
        """,
        str(repo_id),
    )
    if not repo:
        raise HTTPException(status_code=404, detail="Repo not found")

    # Get all workspace_ids for this repo
    workspace_rows = await server_db.fetch_all(
        """
        SELECT workspace_id FROM {{tables.workspaces}}
        WHERE repo_id = $1 AND deleted_at IS NULL
        """,
        str(repo_id),
    )
    workspace_ids = [str(row["workspace_id"]) for row in workspace_rows]

    # Soft-delete workspaces manually (cannot use FK cascade for soft-delete).
    # The FK SET NULL only triggers when repo is hard-deleted (e.g., via project cascade),
    # at which point the trigger in 005_workspaces.sql auto-sets deleted_at.
    if workspace_ids:
        await server_db.execute(
            """
            UPDATE {{tables.workspaces}}
            SET deleted_at = NOW()
            WHERE repo_id = $1 AND deleted_at IS NULL
            """,
            str(repo_id),
        )

    # Delete claims for these workspaces
    claims_deleted = 0
    if workspace_ids:
        result = await server_db.fetch_one(
            """
            WITH deleted AS (
                DELETE FROM {{tables.bead_claims}}
                WHERE workspace_id = ANY($1::uuid[])
                RETURNING id
            )
            SELECT COUNT(*) as count FROM deleted
            """,
            workspace_ids,
        )
        claims_deleted = result["count"] if result else 0

    # Clear Redis presence
    presence_cleared = await clear_workspace_presence(redis, workspace_ids)

    # Soft-delete the repo
    await server_db.execute(
        """
        UPDATE {{tables.repos}}
        SET deleted_at = NOW()
        WHERE id = $1
        """,
        str(repo_id),
    )

    logger.info(
        "Repo soft-deleted: id=%s workspaces=%d claims=%d presence=%d",
        repo_id,
        len(workspace_ids),
        claims_deleted,
        presence_cleared,
    )

    return RepoDeleteResponse(
        id=str(repo_id),
        workspaces_deleted=len(workspace_ids),
        claims_deleted=claims_deleted,
        presence_cleared=presence_cleared,
    )
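

# Shape of a successful delete, for reference (illustrative values only):
#
#   DELETE /v1/repos/{repo_id}
#   -> {"id": "<repo uuid>", "workspaces_deleted": 2,
#       "claims_deleted": 5, "presence_cleared": 2}
#
# Note the asymmetry implemented above: the repo and its workspaces are
# soft-deleted (deleted_at is set), while bead claims and Redis presence
# entries are removed outright.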