beadhub 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. beadhub/__init__.py +12 -0
  2. beadhub/api.py +260 -0
  3. beadhub/auth.py +101 -0
  4. beadhub/aweb_context.py +65 -0
  5. beadhub/aweb_introspection.py +70 -0
  6. beadhub/beads_sync.py +514 -0
  7. beadhub/cli.py +330 -0
  8. beadhub/config.py +65 -0
  9. beadhub/db.py +129 -0
  10. beadhub/defaults/invariants/01-tracking-bdh-only.md +11 -0
  11. beadhub/defaults/invariants/02-communication-mail-first.md +36 -0
  12. beadhub/defaults/invariants/03-communication-chat.md +60 -0
  13. beadhub/defaults/invariants/04-identity-no-impersonation.md +17 -0
  14. beadhub/defaults/invariants/05-collaborate.md +12 -0
  15. beadhub/defaults/roles/backend.md +55 -0
  16. beadhub/defaults/roles/coordinator.md +44 -0
  17. beadhub/defaults/roles/frontend.md +77 -0
  18. beadhub/defaults/roles/implementer.md +73 -0
  19. beadhub/defaults/roles/reviewer.md +56 -0
  20. beadhub/defaults/roles/startup-expert.md +93 -0
  21. beadhub/defaults.py +262 -0
  22. beadhub/events.py +704 -0
  23. beadhub/internal_auth.py +121 -0
  24. beadhub/jsonl.py +68 -0
  25. beadhub/logging.py +62 -0
  26. beadhub/migrations/beads/001_initial.sql +70 -0
  27. beadhub/migrations/beads/002_search_indexes.sql +20 -0
  28. beadhub/migrations/server/001_initial.sql +279 -0
  29. beadhub/names.py +33 -0
  30. beadhub/notifications.py +275 -0
  31. beadhub/pagination.py +125 -0
  32. beadhub/presence.py +495 -0
  33. beadhub/rate_limit.py +152 -0
  34. beadhub/redis_client.py +11 -0
  35. beadhub/roles.py +35 -0
  36. beadhub/routes/__init__.py +1 -0
  37. beadhub/routes/agents.py +303 -0
  38. beadhub/routes/bdh.py +655 -0
  39. beadhub/routes/beads.py +778 -0
  40. beadhub/routes/claims.py +141 -0
  41. beadhub/routes/escalations.py +471 -0
  42. beadhub/routes/init.py +348 -0
  43. beadhub/routes/mcp.py +338 -0
  44. beadhub/routes/policies.py +833 -0
  45. beadhub/routes/repos.py +538 -0
  46. beadhub/routes/status.py +568 -0
  47. beadhub/routes/subscriptions.py +362 -0
  48. beadhub/routes/workspaces.py +1642 -0
  49. beadhub/workspace_config.py +202 -0
  50. beadhub-0.1.0.dist-info/METADATA +254 -0
  51. beadhub-0.1.0.dist-info/RECORD +54 -0
  52. beadhub-0.1.0.dist-info/WHEEL +4 -0
  53. beadhub-0.1.0.dist-info/entry_points.txt +2 -0
  54. beadhub-0.1.0.dist-info/licenses/LICENSE +21 -0
beadhub/routes/bdh.py ADDED
@@ -0,0 +1,655 @@
1
+ """bdh coordination endpoints.
2
+
3
+ These endpoints are used by the `bdh` CLI wrapper to:
4
+ - preflight/record command execution (`POST /v1/bdh/command`)
5
+ - upload beads issue updates (`POST /v1/bdh/sync`)
6
+
7
+ They are intentionally thin wrappers around existing BeadHub primitives:
8
+ - auth comes from embedded aweb (`aw_sk_*` keys)
9
+ - bead sync uses `beadhub.beads_sync`
10
+ - notifications use the outbox pipeline in `beadhub.notifications`
11
+ """
12
+
13
+ from __future__ import annotations
14
+
15
+ import json
16
+ import logging
17
+ from datetime import datetime, timezone
18
+ from typing import Any, Optional
19
+ from uuid import UUID
20
+
21
+ from fastapi import APIRouter, Depends, HTTPException, Request
22
+ from pydantic import BaseModel, ConfigDict, Field, field_validator
23
+ from redis.asyncio import Redis
24
+
25
+ from beadhub.auth import validate_workspace_id
26
+ from beadhub.aweb_context import resolve_aweb_identity
27
+ from beadhub.aweb_introspection import get_identity_from_auth
28
+ from beadhub.beads_sync import (
29
+ DEFAULT_BRANCH,
30
+ BeadsSyncResult,
31
+ _sync_issues_to_db,
32
+ delete_issues_by_id,
33
+ is_valid_alias,
34
+ is_valid_canonical_origin,
35
+ is_valid_human_name,
36
+ validate_issues_from_list,
37
+ )
38
+ from beadhub.routes.repos import canonicalize_git_url, extract_repo_name
39
+
40
+ from ..db import DatabaseInfra, get_db_infra
41
+ from ..jsonl import JSONLParseError, parse_jsonl
42
+ from ..notifications import process_notification_outbox, record_notification_intents
43
+ from ..presence import get_workspace_id_by_alias
44
+ from ..redis_client import get_redis
45
+
46
+ logger = logging.getLogger(__name__)
47
+
48
+ router = APIRouter(prefix="/v1/bdh", tags=["bdh"])
49
+
50
+ MAX_JSONL_SIZE = 10 * 1024 * 1024 # 10MB
51
+ MAX_ISSUES_COUNT = 10000
52
+ MAX_JSON_DEPTH = 10
53
+
54
+
55
+ def _now() -> datetime:
56
+ return datetime.now(timezone.utc)
57
+
58
+
59
+ def _parse_command_line(command_line: str) -> tuple[Optional[str], Optional[str], Optional[str]]:
60
+ """Return (command, bead_id, status) best-effort, or (None, None, None)."""
61
+
62
+ parts = command_line.split()
63
+ if not parts:
64
+ return None, None, None
65
+ cmd = parts[0].strip()
66
+ bead_id: Optional[str] = None
67
+ status: Optional[str] = None
68
+ if cmd in ("update", "close", "delete", "reopen") and len(parts) >= 2:
69
+ bead_id = parts[1].strip()
70
+
71
+ if cmd == "update":
72
+ # Handle: --status in_progress OR --status=in_progress
73
+ for i, p in enumerate(parts):
74
+ if p == "--status" and i + 1 < len(parts):
75
+ status = parts[i + 1].strip()
76
+ break
77
+ if p.startswith("--status="):
78
+ status = p.split("=", 1)[1].strip()
79
+ break
80
+
81
+ return cmd, bead_id, status
82
+
83
+
84
async def _ensure_workspace_alive_or_410(
    db_infra: DatabaseInfra,
    *,
    project_id: str,
    workspace_id: str,
) -> dict[str, Any]:
    """Fetch the workspace row for this project, or fail the request.

    Raises 404 when no such workspace exists in the project, and 410 when
    the workspace row exists but has been soft-deleted.
    """
    manager = db_infra.get_manager("server")
    record = await manager.fetch_one(
        """
        SELECT workspace_id, alias, human_name, role, deleted_at
        FROM {{tables.workspaces}}
        WHERE workspace_id = $1 AND project_id = $2
        """,
        UUID(workspace_id),
        UUID(project_id),
    )
    if record is None:
        raise HTTPException(status_code=404, detail="Workspace not found")
    if record.get("deleted_at") is not None:
        raise HTTPException(status_code=410, detail="Workspace was deleted")
    return record
105
+
106
+
107
async def _touch_workspace_last_seen(
    db_infra: DatabaseInfra,
    *,
    project_id: str,
    workspace_id: str,
    human_name: str,
    role: Optional[str],
) -> None:
    """Refresh last_seen_at (plus human_name/role) for a live workspace.

    Silently no-ops when the workspace does not exist in this project or
    has been soft-deleted (the WHERE clause filters deleted_at IS NULL).
    """
    server_db = db_infra.get_manager("server")
    await server_db.execute(
        """
        UPDATE {{tables.workspaces}}
        SET last_seen_at = $3,
            human_name = $4,
            role = $5
        WHERE workspace_id = $1 AND project_id = $2 AND deleted_at IS NULL
        """,
        UUID(workspace_id),
        UUID(project_id),
        _now(),
        human_name,
        role,
    )
130
+
131
+
132
async def _list_beads_in_progress(
    db_infra: DatabaseInfra, *, project_id: str
) -> list[dict[str, Any]]:
    """Return up to 200 of the most recently claimed beads for a project."""
    manager = db_infra.get_manager("server")
    claim_rows = await manager.fetch_all(
        """
        SELECT bead_id, workspace_id, alias, human_name, claimed_at
        FROM {{tables.bead_claims}}
        WHERE project_id = $1
        ORDER BY claimed_at DESC
        LIMIT 200
        """,
        UUID(project_id),
    )
    in_progress: list[dict[str, Any]] = []
    for claim in claim_rows:
        # title/role are not tracked on claims; reported as None for now.
        in_progress.append(
            {
                "bead_id": claim["bead_id"],
                "workspace_id": str(claim["workspace_id"]),
                "alias": claim["alias"],
                "human_name": claim["human_name"],
                "started_at": claim["claimed_at"].isoformat(),
                "title": None,
                "role": None,
            }
        )
    return in_progress
158
+
159
+
160
async def _upsert_claim(
    db_infra: DatabaseInfra,
    *,
    project_id: str,
    workspace_id: str,
    alias: str,
    human_name: str,
    bead_id: str,
) -> None:
    """Record (or refresh) a workspace's claim on a bead.

    On conflict with an existing (project, bead, workspace) claim, the
    alias, human_name, and claimed_at timestamp are refreshed.
    """
    server_db = db_infra.get_manager("server")
    await server_db.execute(
        """
        INSERT INTO {{tables.bead_claims}} (project_id, workspace_id, alias, human_name, bead_id, claimed_at)
        VALUES ($1, $2, $3, $4, $5, $6)
        ON CONFLICT (project_id, bead_id, workspace_id)
        DO UPDATE SET alias = EXCLUDED.alias, human_name = EXCLUDED.human_name, claimed_at = EXCLUDED.claimed_at
        """,
        UUID(project_id),
        UUID(workspace_id),
        alias,
        human_name,
        bead_id,
        _now(),
    )
184
+
185
+
186
async def _delete_claim(
    db_infra: DatabaseInfra,
    *,
    project_id: str,
    workspace_id: str,
    bead_id: str,
) -> None:
    """Remove this workspace's claim on a bead, if any (idempotent)."""
    server_db = db_infra.get_manager("server")
    await server_db.execute(
        """
        DELETE FROM {{tables.bead_claims}}
        WHERE project_id = $1 AND workspace_id = $2 AND bead_id = $3
        """,
        UUID(project_id),
        UUID(workspace_id),
        bead_id,
    )
203
+
204
+
205
async def ensure_repo(
    db: DatabaseInfra,
    project_id: UUID,
    origin_url: str,
) -> UUID:
    """Ensure a repo exists for the given project and origin.

    The origin URL is canonicalized before the upsert so different spellings
    of the same remote map to a single repo row keyed on
    (project_id, canonical_origin).

    Returns the repo_id (existing or newly created).
    """
    canonical_origin = canonicalize_git_url(origin_url)
    repo_name = extract_repo_name(canonical_origin)

    server_db = db.get_manager("server")
    # Also clear deleted_at to undelete soft-deleted repos when re-registered
    result = await server_db.fetch_one(
        """
        INSERT INTO {{tables.repos}} (project_id, origin_url, canonical_origin, name)
        VALUES ($1, $2, $3, $4)
        ON CONFLICT (project_id, canonical_origin)
        DO UPDATE SET origin_url = EXCLUDED.origin_url, deleted_at = NULL
        RETURNING id
        """,
        project_id,
        origin_url,
        canonical_origin,
        repo_name,
    )
    return result["id"]
233
+
234
+
235
async def upsert_workspace(
    db: DatabaseInfra,
    workspace_id: str,
    project_id: UUID,
    repo_id: UUID,
    alias: str,
    human_name: str,
    role: Optional[str] = None,
    hostname: Optional[str] = None,
    workspace_path: Optional[str] = None,
) -> None:
    """Upsert workspace into persistent workspaces table.

    Creates or updates the workspace record. The workspace_id is the constant
    identifier. project_id, repo_id, alias, hostname, and workspace_path are
    immutable after creation (hostname/workspace_path can be set once if NULL).
    Only human_name, role, current_branch, deleted_at, and last_seen_at can be updated.

    last_seen_at is updated on every bdh command to track workspace activity.
    """
    # NOTE(review): workspace_id is bound as str here, while other helpers in
    # this module wrap it in UUID() — presumably the column type coerces it;
    # confirm against the workspaces schema.
    server_db = db.get_manager("server")
    await server_db.execute(
        """
        INSERT INTO {{tables.workspaces}} (workspace_id, project_id, repo_id, alias, human_name, role, hostname, workspace_path, last_seen_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW())
        ON CONFLICT (workspace_id) DO UPDATE SET
            human_name = EXCLUDED.human_name,
            role = COALESCE(EXCLUDED.role, {{tables.workspaces}}.role),
            hostname = COALESCE({{tables.workspaces}}.hostname, EXCLUDED.hostname),
            workspace_path = COALESCE({{tables.workspaces}}.workspace_path, EXCLUDED.workspace_path),
            last_seen_at = NOW(),
            updated_at = NOW()
        """,
        workspace_id,
        project_id,
        repo_id,
        alias,
        human_name,
        role,
        hostname,
        workspace_path,
    )
277
+
278
+
279
async def check_alias_collision(
    db: DatabaseInfra,
    redis: Redis,
    project_id: UUID,
    workspace_id: str,
    alias: str,
) -> Optional[str]:
    """Check if alias is already used by another workspace within the same project.

    Aliases are unique per project (not globally). Projects are tenant boundaries
    with no cross-project communication, so per-project uniqueness is sufficient.

    Three sources are consulted in order: the workspaces table (authoritative),
    the bead_claims table (covers claims made before the workspace row was
    persisted), and the Redis presence index.

    Returns:
        The workspace_id using this alias if collision, None if available.
    """
    server_db = db.get_manager("server")

    # Check workspaces table first (authoritative source)
    row = await server_db.fetch_one(
        """
        SELECT workspace_id
        FROM {{tables.workspaces}}
        WHERE project_id = $1 AND alias = $2 AND workspace_id != $3 AND deleted_at IS NULL
        LIMIT 1
        """,
        project_id,
        alias,
        UUID(workspace_id),
    )
    if row:
        return str(row["workspace_id"])

    # Also check bead_claims for another workspace with same alias
    # (handles race conditions before workspace is persisted)
    row = await server_db.fetch_one(
        """
        SELECT DISTINCT workspace_id
        FROM {{tables.bead_claims}}
        WHERE project_id = $1 AND alias = $2 AND workspace_id != $3
        LIMIT 1
        """,
        project_id,
        alias,
        UUID(workspace_id),
    )
    if row:
        return str(row["workspace_id"])

    # Check Redis presence for another workspace with same alias
    # Uses O(1) secondary index instead of SCAN
    colliding_workspace = await get_workspace_id_by_alias(redis, str(project_id), alias)
    if colliding_workspace and colliding_workspace != workspace_id:
        return colliding_workspace

    return None
334
+
335
+
336
class CommandRequest(BaseModel):
    """Request body for `POST /v1/bdh/command`.

    Sent by the `bdh` CLI wrapper before executing a beads command so the
    server can record workspace activity and return coordination context.
    Unknown fields are rejected (extra="forbid").
    """

    model_config = ConfigDict(extra="forbid")

    # Workspace identity; format checked by validate_workspace_id below.
    workspace_id: str = Field(..., min_length=1)
    repo_id: str | None = None
    alias: str = Field(..., min_length=1, max_length=64)
    # May be empty; when non-empty it must pass is_valid_human_name.
    human_name: str = Field(..., min_length=0, max_length=64)
    repo_origin: str = Field(..., min_length=1, max_length=2048)
    role: str | None = Field(default=None, max_length=50)
    # Raw bd command line being executed (e.g. "update bd-1 --status in_progress").
    command_line: str = Field(..., min_length=1, max_length=8192)

    @field_validator("workspace_id")
    @classmethod
    def _validate_workspace_id(cls, v: str) -> str:
        # Delegates to the shared workspace-id format check.
        return validate_workspace_id(v)

    @field_validator("alias")
    @classmethod
    def _validate_alias(cls, v: str) -> str:
        if not is_valid_alias(v):
            raise ValueError("Invalid alias format")
        return v

    @field_validator("human_name")
    @classmethod
    def _validate_human_name(cls, v: str) -> str:
        # Normalize whitespace; an empty human_name is allowed.
        v = (v or "").strip()
        if v and not is_valid_human_name(v):
            raise ValueError("Invalid human_name format")
        return v
366
+
367
+
368
class CommandContext(BaseModel):
    """Coordination context returned with a command/sync response."""

    # Count of pending messages for the caller (currently always 0; see `command`).
    messages_waiting: int = 0
    # Claims across the project, as produced by _list_beads_in_progress.
    beads_in_progress: list[dict[str, Any]] = Field(default_factory=list)
371
+
372
+
373
class CommandResponse(BaseModel):
    """Response body for `POST /v1/bdh/command`."""

    # Whether the CLI may proceed with the command.
    approved: bool
    # Human-readable explanation when not approved.
    reason: str = ""
    context: CommandContext | None = None
377
+
378
+
379
@router.post("/command", response_model=CommandResponse)
async def command(
    request: Request,
    payload: CommandRequest,
    db_infra: DatabaseInfra = Depends(get_db_infra),
) -> CommandResponse:
    """Preflight/record a bdh command execution.

    Authenticates the caller, verifies the workspace is alive (404/410),
    refreshes its last-seen timestamp, and returns coordination context
    (the project's in-progress bead claims). Always approves.
    """
    identity = await get_identity_from_auth(request, db_infra)
    project_id = identity.project_id

    # If auth provides an agent identity, it must match the claimed workspace_id.
    if identity.agent_id is not None and identity.agent_id != payload.workspace_id:
        raise HTTPException(status_code=403, detail="workspace_id does not match API key identity")

    # Ensure workspace exists, belongs to project, and is not deleted (410).
    await _ensure_workspace_alive_or_410(
        db_infra, project_id=project_id, workspace_id=payload.workspace_id
    )

    await _touch_workspace_last_seen(
        db_infra,
        project_id=project_id,
        workspace_id=payload.workspace_id,
        human_name=payload.human_name or "",
        role=payload.role,
    )

    beads_in_progress = await _list_beads_in_progress(db_infra, project_id=project_id)
    # messages_waiting is not computed yet; always reported as 0.
    return CommandResponse(
        approved=True,
        context=CommandContext(messages_waiting=0, beads_in_progress=beads_in_progress),
    )
410
+
411
+
412
class SyncStats(BaseModel):
    """Per-sync issue counters reported back to the CLI."""

    # Number of issues parsed from the uploaded JSONL.
    received: int = 0
    inserted: int = 0
    updated: int = 0
    # Number of issues removed via deleted_ids.
    deleted: int = 0
417
+
418
+
419
class SyncRequest(BaseModel):
    """Request body for `POST /v1/bdh/sync`.

    Supports two modes selected by `sync_mode`: "full" (complete snapshot in
    `issues_jsonl`) and "incremental" (`changed_issues` and/or `deleted_ids`).
    Unknown fields are rejected (extra="forbid").
    """

    model_config = ConfigDict(extra="forbid")

    workspace_id: str = Field(..., min_length=1)
    repo_id: str | None = None
    alias: str = Field(..., min_length=1, max_length=64)
    # May be empty; when non-empty it must pass is_valid_human_name.
    human_name: str = Field(..., min_length=0, max_length=64)
    repo_origin: str = Field(..., min_length=1, max_length=2048)
    role: str | None = Field(default=None, max_length=50)

    # Full sync mode
    issues_jsonl: str | None = Field(default=None, max_length=MAX_JSONL_SIZE)

    # Incremental sync mode
    sync_mode: str | None = Field(default=None, max_length=32)
    changed_issues: str | None = Field(default=None, max_length=MAX_JSONL_SIZE)
    deleted_ids: list[str] = Field(default_factory=list)
    sync_protocol_version: int | None = None

    # Command context for claim attribution (best-effort)
    command_line: str | None = Field(default=None, max_length=8192)

    @field_validator("workspace_id")
    @classmethod
    def _validate_workspace_id(cls, v: str) -> str:
        # Delegates to the shared workspace-id format check.
        return validate_workspace_id(v)

    @field_validator("alias")
    @classmethod
    def _validate_alias(cls, v: str) -> str:
        if not is_valid_alias(v):
            raise ValueError("Invalid alias format")
        return v

    @field_validator("human_name")
    @classmethod
    def _validate_human_name(cls, v: str) -> str:
        # Normalize whitespace; an empty human_name is allowed.
        v = (v or "").strip()
        if v and not is_valid_human_name(v):
            raise ValueError("Invalid human_name format")
        return v
460
+
461
+
462
class SyncResponse(BaseModel):
    """Response body for `POST /v1/bdh/sync`."""

    synced: bool = True
    # Total issues stored for this (project, repo, branch) after the sync.
    issues_count: int = 0
    context: CommandContext | None = None
    stats: SyncStats | None = None
    # Protocol version the server speaks; clients can use it to negotiate.
    sync_protocol_version: int = 1
468
+
469
+
470
@router.post("/sync", response_model=SyncResponse)
async def sync(
    request: Request,
    payload: SyncRequest,
    db_infra: DatabaseInfra = Depends(get_db_infra),
    redis: Redis = Depends(get_redis),
) -> SyncResponse:
    """Upload beads issue updates (full snapshot or incremental) for a workspace.

    Flow:
      1. Authenticate and verify the workspace (403 on identity mismatch,
         404/410 when the workspace is missing or soft-deleted).
      2. Persist issues via `beadhub.beads_sync` — a full snapshot from
         `issues_jsonl`, or incremental `changed_issues` plus `deleted_ids`.
      3. Best-effort claim bookkeeping derived from the originating command.
      4. Record and dispatch status-change notifications via the outbox.
      5. Audit-log the sync (best-effort) and return per-sync stats.

    NOTE(review): `redis` is injected but not used in this handler —
    presumably kept for dependency parity with sibling endpoints; confirm.
    """
    identity = await get_identity_from_auth(request, db_infra)
    project_id = identity.project_id

    # If auth provides an agent identity, it must match the claimed workspace_id.
    if identity.agent_id is not None and identity.agent_id != payload.workspace_id:
        raise HTTPException(status_code=403, detail="workspace_id does not match API key identity")

    await _ensure_workspace_alive_or_410(
        db_infra, project_id=project_id, workspace_id=payload.workspace_id
    )

    # Touch workspace activity for "gone workspace" and status displays.
    await _touch_workspace_last_seen(
        db_infra,
        project_id=project_id,
        workspace_id=payload.workspace_id,
        human_name=payload.human_name or "",
        role=payload.role,
    )

    # Issues are keyed on the canonical origin, matching ensure_repo.
    canonical_origin = canonicalize_git_url(payload.repo_origin)
    if not is_valid_canonical_origin(canonical_origin):
        raise HTTPException(status_code=422, detail="Invalid repo_origin")

    beads_db = db_infra.get_manager("beads")

    # Default to a full sync when the client did not specify a mode.
    mode = (payload.sync_mode or "full").strip().lower()
    if mode not in ("full", "incremental"):
        raise HTTPException(status_code=422, detail="sync_mode must be 'full' or 'incremental'")

    received = 0
    inserted = 0
    updated = 0
    deleted = 0

    # `result` carries status_changes used for notifications further below.
    result: Optional[BeadsSyncResult] = None
    if mode == "full":
        body = (payload.issues_jsonl or "").strip()
        if not body:
            raise HTTPException(status_code=422, detail="issues_jsonl is required for full sync")
        try:
            # Size/depth caps guard against pathological payloads.
            issues_raw = parse_jsonl(body, max_depth=MAX_JSON_DEPTH, max_count=MAX_ISSUES_COUNT)
        except JSONLParseError as e:
            raise HTTPException(status_code=400, detail=str(e)) from e
        issues = validate_issues_from_list(issues_raw)
        received = len(issues)
        result = await _sync_issues_to_db(
            issues,
            beads_db,
            project_id=project_id,
            repo=canonical_origin,
            branch=DEFAULT_BRANCH,
        )
        inserted = result.issues_added
        updated = result.issues_updated
    else:
        # Incremental mode must carry at least one change or deletion.
        if (
            payload.changed_issues is None or payload.changed_issues.strip() == ""
        ) and not payload.deleted_ids:
            raise HTTPException(
                status_code=422, detail="incremental sync requires changes or deletions"
            )

        if payload.changed_issues is not None and payload.changed_issues.strip():
            try:
                issues_raw = parse_jsonl(
                    payload.changed_issues, max_depth=MAX_JSON_DEPTH, max_count=MAX_ISSUES_COUNT
                )
            except JSONLParseError as e:
                raise HTTPException(status_code=400, detail=str(e)) from e
            issues = validate_issues_from_list(issues_raw)
            received = len(issues)
            result = await _sync_issues_to_db(
                issues,
                beads_db,
                project_id=project_id,
                repo=canonical_origin,
                branch=DEFAULT_BRANCH,
            )
            inserted = result.issues_added
            updated = result.issues_updated

        if payload.deleted_ids:
            deleted = await delete_issues_by_id(
                beads_db,
                project_id=project_id,
                bead_ids=payload.deleted_ids,
                repo=canonical_origin,
                branch=DEFAULT_BRANCH,
            )

    # Update claims based on the bd command that succeeded (best-effort).
    cmd, bead_id, status = _parse_command_line(payload.command_line or "")
    if bead_id:
        if cmd == "update" and status == "in_progress":
            # Moving a bead to in_progress claims it for this workspace.
            await _upsert_claim(
                db_infra,
                project_id=project_id,
                workspace_id=payload.workspace_id,
                alias=payload.alias,
                human_name=payload.human_name or "",
                bead_id=bead_id,
            )
        elif cmd in ("close", "delete") or (cmd == "update" and status and status != "in_progress"):
            # Closing/deleting, or leaving in_progress, releases the claim.
            await _delete_claim(
                db_infra,
                project_id=project_id,
                workspace_id=payload.workspace_id,
                bead_id=bead_id,
            )

    if payload.deleted_ids:
        # Ensure deletions also remove claims for this workspace.
        for bid in payload.deleted_ids:
            await _delete_claim(
                db_infra,
                project_id=project_id,
                workspace_id=payload.workspace_id,
                bead_id=bid,
            )

    # Record notification intents in outbox, then process them.
    notifications_sent = 0
    notifications_failed = 0
    if result is not None and result.status_changes:
        await record_notification_intents(result.status_changes, project_id, db_infra)
        sender = await resolve_aweb_identity(request, db_infra)
        notifications_sent, notifications_failed = await process_notification_outbox(
            project_id,
            db_infra,
            sender_agent_id=sender.agent_id,
            sender_alias=sender.alias,
        )

    # Update audit log (best-effort; don't fail the sync on logging issues).
    try:
        server_db = db_infra.get_manager("server")
        await server_db.execute(
            """
            INSERT INTO {{tables.audit_log}} (project_id, event_type, details)
            VALUES ($1, $2, $3::jsonb)
            """,
            UUID(project_id),
            "bdh_sync",
            json.dumps(
                {
                    "project_id": project_id,
                    "repo": canonical_origin,
                    "mode": mode,
                    "received": received,
                    "inserted": inserted,
                    "updated": updated,
                    "deleted": deleted,
                    "notifications_sent": notifications_sent,
                    "notifications_failed": notifications_failed,
                }
            ),
        )
    except Exception:
        logger.exception("Failed to write audit log for bdh sync")

    # Count total issues for this (project, repo, branch) after sync.
    count_row = await beads_db.fetch_one(
        """
        SELECT COUNT(*) AS c
        FROM {{tables.beads_issues}}
        WHERE project_id = $1 AND repo = $2 AND branch = $3
        """,
        UUID(project_id),
        canonical_origin,
        DEFAULT_BRANCH,
    )
    issues_count = int(count_row["c"]) if count_row else 0

    return SyncResponse(
        synced=True,
        issues_count=issues_count,
        stats=SyncStats(received=received, inserted=inserted, updated=updated, deleted=deleted),
        sync_protocol_version=1,
    )