draft-board 0.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app/backend/.env.example +9 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_patch.txt +195 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_stat.txt +6 -0
- package/app/backend/CURL_EXAMPLES.md +335 -0
- package/app/backend/ENV_SETUP.md +65 -0
- package/app/backend/alembic/env.py +71 -0
- package/app/backend/alembic/script.py.mako +28 -0
- package/app/backend/alembic/versions/001_initial_schema.py +104 -0
- package/app/backend/alembic/versions/002_add_jobs_table.py +52 -0
- package/app/backend/alembic/versions/003_add_workspace_table.py +48 -0
- package/app/backend/alembic/versions/004_add_evidence_table.py +56 -0
- package/app/backend/alembic/versions/005_add_verification_commands.py +32 -0
- package/app/backend/alembic/versions/006_add_planner_lock_table.py +39 -0
- package/app/backend/alembic/versions/007_add_revision_review_tables.py +126 -0
- package/app/backend/alembic/versions/008_add_revision_idempotency_and_traceability.py +52 -0
- package/app/backend/alembic/versions/009_add_job_health_fields.py +46 -0
- package/app/backend/alembic/versions/010_add_review_comment_line_content.py +36 -0
- package/app/backend/alembic/versions/011_add_analysis_cache.py +47 -0
- package/app/backend/alembic/versions/012_add_boards_table.py +102 -0
- package/app/backend/alembic/versions/013_add_ticket_blocking.py +45 -0
- package/app/backend/alembic/versions/014_add_agent_sessions.py +220 -0
- package/app/backend/alembic/versions/015_add_ticket_sort_order.py +33 -0
- package/app/backend/alembic/versions/03220f0b93ae_add_pr_fields_to_ticket.py +49 -0
- package/app/backend/alembic/versions/0c2d89fff3b1_seed_board_configs_from_yaml.py +206 -0
- package/app/backend/alembic/versions/3348e5cf54c1_add_merge_checklist_table.py +67 -0
- package/app/backend/alembic/versions/357c780ee445_add_goal_status.py +34 -0
- package/app/backend/alembic/versions/553340b7e26c_add_autonomy_fields_to_goal.py +65 -0
- package/app/backend/alembic/versions/774dc335c679_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/7b307e847cbd_merge_heads.py +23 -0
- package/app/backend/alembic/versions/82ecd978cc70_add_missing_indexes.py +48 -0
- package/app/backend/alembic/versions/8ef5054dc280_add_normalized_log_entries.py +173 -0
- package/app/backend/alembic/versions/8f3e2bd8ea3b_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/9d17f0698d3b_add_config_column_to_boards_table.py +30 -0
- package/app/backend/alembic/versions/add_agent_conversation_history.py +72 -0
- package/app/backend/alembic/versions/add_job_variant.py +34 -0
- package/app/backend/alembic/versions/add_performance_indexes.py +95 -0
- package/app/backend/alembic/versions/add_repos_and_board_repos.py +174 -0
- package/app/backend/alembic/versions/add_session_id_to_jobs.py +27 -0
- package/app/backend/alembic/versions/add_sqlite_backend_tables.py +104 -0
- package/app/backend/alembic/versions/b10fb0b62240_add_diff_content_to_revisions.py +34 -0
- package/app/backend/alembic.ini +89 -0
- package/app/backend/app/__init__.py +3 -0
- package/app/backend/app/data_dir.py +85 -0
- package/app/backend/app/database.py +70 -0
- package/app/backend/app/database_sync.py +64 -0
- package/app/backend/app/dependencies/__init__.py +5 -0
- package/app/backend/app/dependencies/auth.py +80 -0
- package/app/backend/app/dependencies.py +43 -0
- package/app/backend/app/exceptions.py +178 -0
- package/app/backend/app/executors/__init__.py +1 -0
- package/app/backend/app/executors/adapters/__init__.py +1 -0
- package/app/backend/app/executors/adapters/aider.py +152 -0
- package/app/backend/app/executors/adapters/amazon_q.py +103 -0
- package/app/backend/app/executors/adapters/amp.py +123 -0
- package/app/backend/app/executors/adapters/claude.py +177 -0
- package/app/backend/app/executors/adapters/cline.py +127 -0
- package/app/backend/app/executors/adapters/codex.py +167 -0
- package/app/backend/app/executors/adapters/copilot.py +202 -0
- package/app/backend/app/executors/adapters/cursor.py +87 -0
- package/app/backend/app/executors/adapters/droid.py +123 -0
- package/app/backend/app/executors/adapters/gemini.py +132 -0
- package/app/backend/app/executors/adapters/goose.py +131 -0
- package/app/backend/app/executors/adapters/opencode.py +123 -0
- package/app/backend/app/executors/adapters/qwen.py +123 -0
- package/app/backend/app/executors/plugins/__init__.py +1 -0
- package/app/backend/app/executors/registry.py +202 -0
- package/app/backend/app/executors/spec.py +226 -0
- package/app/backend/app/main.py +486 -0
- package/app/backend/app/middleware/__init__.py +13 -0
- package/app/backend/app/middleware/idempotency.py +426 -0
- package/app/backend/app/middleware/rate_limit.py +312 -0
- package/app/backend/app/middleware/security_headers.py +43 -0
- package/app/backend/app/middleware/timeout.py +37 -0
- package/app/backend/app/models/__init__.py +56 -0
- package/app/backend/app/models/agent_conversation_history.py +56 -0
- package/app/backend/app/models/agent_session.py +127 -0
- package/app/backend/app/models/analysis_cache.py +49 -0
- package/app/backend/app/models/base.py +9 -0
- package/app/backend/app/models/board.py +79 -0
- package/app/backend/app/models/board_repo.py +68 -0
- package/app/backend/app/models/cost_budget.py +42 -0
- package/app/backend/app/models/enums.py +40 -0
- package/app/backend/app/models/evidence.py +132 -0
- package/app/backend/app/models/goal.py +102 -0
- package/app/backend/app/models/idempotency_entry.py +30 -0
- package/app/backend/app/models/job.py +163 -0
- package/app/backend/app/models/job_queue.py +39 -0
- package/app/backend/app/models/kv_store.py +28 -0
- package/app/backend/app/models/merge_checklist.py +87 -0
- package/app/backend/app/models/normalized_log.py +100 -0
- package/app/backend/app/models/planner_lock.py +43 -0
- package/app/backend/app/models/rate_limit_entry.py +25 -0
- package/app/backend/app/models/repo.py +66 -0
- package/app/backend/app/models/review_comment.py +91 -0
- package/app/backend/app/models/review_summary.py +69 -0
- package/app/backend/app/models/revision.py +130 -0
- package/app/backend/app/models/ticket.py +223 -0
- package/app/backend/app/models/ticket_event.py +83 -0
- package/app/backend/app/models/user.py +47 -0
- package/app/backend/app/models/workspace.py +71 -0
- package/app/backend/app/redis_client.py +119 -0
- package/app/backend/app/routers/__init__.py +29 -0
- package/app/backend/app/routers/agents.py +296 -0
- package/app/backend/app/routers/auth.py +94 -0
- package/app/backend/app/routers/board.py +885 -0
- package/app/backend/app/routers/dashboard.py +351 -0
- package/app/backend/app/routers/debug.py +528 -0
- package/app/backend/app/routers/evidence.py +96 -0
- package/app/backend/app/routers/executors.py +324 -0
- package/app/backend/app/routers/goals.py +574 -0
- package/app/backend/app/routers/jobs.py +448 -0
- package/app/backend/app/routers/maintenance.py +172 -0
- package/app/backend/app/routers/merge.py +360 -0
- package/app/backend/app/routers/planner.py +537 -0
- package/app/backend/app/routers/pull_requests.py +382 -0
- package/app/backend/app/routers/repos.py +263 -0
- package/app/backend/app/routers/revisions.py +939 -0
- package/app/backend/app/routers/settings.py +267 -0
- package/app/backend/app/routers/tickets.py +2003 -0
- package/app/backend/app/routers/webhooks.py +143 -0
- package/app/backend/app/routers/websocket.py +249 -0
- package/app/backend/app/schemas/__init__.py +109 -0
- package/app/backend/app/schemas/board.py +87 -0
- package/app/backend/app/schemas/common.py +33 -0
- package/app/backend/app/schemas/evidence.py +87 -0
- package/app/backend/app/schemas/goal.py +90 -0
- package/app/backend/app/schemas/job.py +97 -0
- package/app/backend/app/schemas/merge.py +139 -0
- package/app/backend/app/schemas/planner.py +500 -0
- package/app/backend/app/schemas/repo.py +187 -0
- package/app/backend/app/schemas/review.py +137 -0
- package/app/backend/app/schemas/revision.py +114 -0
- package/app/backend/app/schemas/ticket.py +238 -0
- package/app/backend/app/schemas/ticket_event.py +72 -0
- package/app/backend/app/schemas/workspace.py +19 -0
- package/app/backend/app/services/__init__.py +31 -0
- package/app/backend/app/services/agent_memory_service.py +223 -0
- package/app/backend/app/services/agent_registry.py +346 -0
- package/app/backend/app/services/agent_session_manager.py +318 -0
- package/app/backend/app/services/agent_session_service.py +219 -0
- package/app/backend/app/services/agent_tools.py +379 -0
- package/app/backend/app/services/auth_service.py +98 -0
- package/app/backend/app/services/autonomy_service.py +380 -0
- package/app/backend/app/services/board_repo_service.py +201 -0
- package/app/backend/app/services/board_service.py +326 -0
- package/app/backend/app/services/cleanup_service.py +1085 -0
- package/app/backend/app/services/config_service.py +908 -0
- package/app/backend/app/services/context_gatherer.py +557 -0
- package/app/backend/app/services/cost_tracking_service.py +293 -0
- package/app/backend/app/services/cursor_log_normalizer.py +536 -0
- package/app/backend/app/services/delivery_pipeline.py +440 -0
- package/app/backend/app/services/executor_service.py +634 -0
- package/app/backend/app/services/git_host/__init__.py +11 -0
- package/app/backend/app/services/git_host/factory.py +87 -0
- package/app/backend/app/services/git_host/github.py +270 -0
- package/app/backend/app/services/git_host/gitlab.py +194 -0
- package/app/backend/app/services/git_host/protocol.py +75 -0
- package/app/backend/app/services/git_merge_simple.py +346 -0
- package/app/backend/app/services/git_ops.py +384 -0
- package/app/backend/app/services/github_service.py +233 -0
- package/app/backend/app/services/goal_service.py +113 -0
- package/app/backend/app/services/job_service.py +423 -0
- package/app/backend/app/services/job_watchdog_service.py +424 -0
- package/app/backend/app/services/langchain_adapter.py +122 -0
- package/app/backend/app/services/llm_provider_clients.py +351 -0
- package/app/backend/app/services/llm_service.py +285 -0
- package/app/backend/app/services/log_normalizer.py +342 -0
- package/app/backend/app/services/log_stream_service.py +276 -0
- package/app/backend/app/services/merge_checklist_service.py +264 -0
- package/app/backend/app/services/merge_service.py +784 -0
- package/app/backend/app/services/orchestrator_log.py +84 -0
- package/app/backend/app/services/planner_service.py +1662 -0
- package/app/backend/app/services/planner_tick_sync.py +1040 -0
- package/app/backend/app/services/queued_message_service.py +156 -0
- package/app/backend/app/services/reliability_wrapper.py +389 -0
- package/app/backend/app/services/repo_discovery_service.py +318 -0
- package/app/backend/app/services/review_service.py +334 -0
- package/app/backend/app/services/revision_service.py +389 -0
- package/app/backend/app/services/safe_autopilot.py +510 -0
- package/app/backend/app/services/sqlite_worker.py +372 -0
- package/app/backend/app/services/task_dispatch.py +135 -0
- package/app/backend/app/services/ticket_generation_service.py +1781 -0
- package/app/backend/app/services/ticket_service.py +486 -0
- package/app/backend/app/services/udar_planner_service.py +1007 -0
- package/app/backend/app/services/webhook_service.py +126 -0
- package/app/backend/app/services/workspace_service.py +465 -0
- package/app/backend/app/services/worktree_file_service.py +92 -0
- package/app/backend/app/services/worktree_validator.py +213 -0
- package/app/backend/app/sqlite_kv.py +278 -0
- package/app/backend/app/state_machine.py +128 -0
- package/app/backend/app/templates/__init__.py +5 -0
- package/app/backend/app/templates/registry.py +243 -0
- package/app/backend/app/utils/__init__.py +5 -0
- package/app/backend/app/utils/artifact_reader.py +87 -0
- package/app/backend/app/utils/circuit_breaker.py +229 -0
- package/app/backend/app/utils/db_retry.py +136 -0
- package/app/backend/app/utils/ignored_fields.py +123 -0
- package/app/backend/app/utils/validators.py +54 -0
- package/app/backend/app/websocket/__init__.py +5 -0
- package/app/backend/app/websocket/manager.py +179 -0
- package/app/backend/app/websocket/state_tracker.py +113 -0
- package/app/backend/app/worker.py +3190 -0
- package/app/backend/calculator_tickets.json +40 -0
- package/app/backend/canary_tests.sh +591 -0
- package/app/backend/celerybeat-schedule +0 -0
- package/app/backend/celerybeat-schedule-shm +0 -0
- package/app/backend/celerybeat-schedule-wal +0 -0
- package/app/backend/logs/.gitkeep +3 -0
- package/app/backend/multiplication_division_implementation_tickets.json +55 -0
- package/app/backend/multiplication_division_tickets.json +42 -0
- package/app/backend/pyproject.toml +45 -0
- package/app/backend/requirements-dev.txt +8 -0
- package/app/backend/requirements.txt +20 -0
- package/app/backend/run.sh +30 -0
- package/app/backend/run_with_logs.sh +10 -0
- package/app/backend/scientific_calculator_tickets.json +40 -0
- package/app/backend/scripts/extract_openapi.py +21 -0
- package/app/backend/scripts/seed_demo.py +187 -0
- package/app/backend/setup_demo_review.py +302 -0
- package/app/backend/test_actual_parse.py +41 -0
- package/app/backend/test_agent_streaming.py +61 -0
- package/app/backend/test_parse.py +51 -0
- package/app/backend/test_streaming.py +51 -0
- package/app/backend/test_subprocess_streaming.py +50 -0
- package/app/backend/tests/__init__.py +1 -0
- package/app/backend/tests/conftest.py +46 -0
- package/app/backend/tests/test_auth.py +341 -0
- package/app/backend/tests/test_autonomy_service.py +391 -0
- package/app/backend/tests/test_cleanup_service_safety.py +417 -0
- package/app/backend/tests/test_middleware.py +279 -0
- package/app/backend/tests/test_planner_providers.py +290 -0
- package/app/backend/tests/test_planner_unblock.py +183 -0
- package/app/backend/tests/test_revision_invariants.py +618 -0
- package/app/backend/tests/test_sqlite_kv.py +290 -0
- package/app/backend/tests/test_sqlite_worker.py +353 -0
- package/app/backend/tests/test_task_dispatch.py +100 -0
- package/app/backend/tests/test_ticket_validation.py +304 -0
- package/app/backend/tests/test_udar_agent.py +693 -0
- package/app/backend/tests/test_webhook_service.py +184 -0
- package/app/backend/tickets_output.json +59 -0
- package/app/backend/user_management_tickets.json +50 -0
- package/app/backend/uvicorn.log +0 -0
- package/app/draft.yaml +313 -0
- package/app/frontend/dist/assets/index-LcjCczu5.js +155 -0
- package/app/frontend/dist/assets/index-_FP_279e.css +1 -0
- package/app/frontend/dist/index.html +14 -0
- package/app/frontend/dist/vite.svg +1 -0
- package/app/frontend/package.json +101 -0
- package/bin/cli.js +527 -0
- package/package.json +37 -0
|
@@ -0,0 +1,1085 @@
|
|
|
1
|
+
"""Service for cleaning up worktrees and evidence files."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
import shutil
|
|
7
|
+
import subprocess
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from datetime import UTC, datetime, timedelta
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from sqlalchemy import select
|
|
13
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
14
|
+
from sqlalchemy.orm import selectinload
|
|
15
|
+
|
|
16
|
+
from app.data_dir import get_data_dir, get_worktrees_root
|
|
17
|
+
from app.models.enums import ActorType, EventType
|
|
18
|
+
from app.models.evidence import Evidence
|
|
19
|
+
from app.models.ticket import Ticket
|
|
20
|
+
from app.models.ticket_event import TicketEvent
|
|
21
|
+
from app.models.workspace import Workspace
|
|
22
|
+
from app.services.config_service import DraftConfig
|
|
23
|
+
from app.services.workspace_service import WorkspaceService
|
|
24
|
+
from app.state_machine import TicketState
|
|
25
|
+
|
|
26
|
+
# Event type constants - use enum values for consistency
|
|
27
|
+
MERGE_SUCCEEDED_EVENT = EventType.MERGE_SUCCEEDED.value
|
|
28
|
+
MERGE_REQUESTED_EVENT = EventType.MERGE_REQUESTED.value
|
|
29
|
+
|
|
30
|
+
logger = logging.getLogger(__name__)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# Ticket states that should NOT have their worktrees deleted
|
|
34
|
+
PROTECTED_TICKET_STATES = {
|
|
35
|
+
TicketState.EXECUTING.value,
|
|
36
|
+
TicketState.VERIFYING.value,
|
|
37
|
+
TicketState.NEEDS_HUMAN.value,
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class CleanupResult:
    """Result of a cleanup operation.

    Aggregates counters for worktree and evidence-file deletions plus a
    human-readable trail of per-item messages in ``details``.
    """

    worktrees_deleted: int = 0       # worktrees successfully removed
    worktrees_failed: int = 0        # worktrees where removal errored
    worktrees_skipped: int = 0       # worktrees intentionally left alone
    evidence_files_deleted: int = 0  # evidence files successfully removed
    evidence_files_failed: int = 0   # evidence files where removal errored
    bytes_freed: int = 0             # total bytes reclaimed (best-effort)
    # Use a default factory so every instance gets its own list, instead of
    # the `= None` placeholder (which also made the annotation wrong).
    details: list[str] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Backward compatibility: callers may still pass details=None
        # explicitly; normalize that to an empty list as before.
        if self.details is None:
            self.details = []
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _sanitize_output(text: str | None, max_length: int = 500) -> str | None:
|
|
59
|
+
"""Sanitize git output for safe JSON storage.
|
|
60
|
+
|
|
61
|
+
Removes null bytes, carriage returns, and control characters that could
|
|
62
|
+
break JSON/logging or cause odd rendering in UI.
|
|
63
|
+
|
|
64
|
+
Args:
|
|
65
|
+
text: Raw output text (may contain control chars)
|
|
66
|
+
max_length: Maximum length to keep
|
|
67
|
+
|
|
68
|
+
Returns:
|
|
69
|
+
Sanitized text or None if input was None
|
|
70
|
+
"""
|
|
71
|
+
if text is None:
|
|
72
|
+
return None
|
|
73
|
+
# Remove null bytes, carriage returns (\r), and most control characters
|
|
74
|
+
# Keep only newlines (\n) and tabs (\t) as whitespace
|
|
75
|
+
sanitized = "".join(
|
|
76
|
+
c
|
|
77
|
+
for c in text
|
|
78
|
+
if c == "\n" or c == "\t" or (ord(c) >= 32 and ord(c) != 127)
|
|
79
|
+
# Note: \r (ord 13) is excluded since it's < 32 and not \n or \t
|
|
80
|
+
)
|
|
81
|
+
return sanitized[:max_length] if len(sanitized) > max_length else sanitized
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class CleanupService:
|
|
85
|
+
"""Service for cleaning up worktrees and evidence files.
|
|
86
|
+
|
|
87
|
+
Safety:
|
|
88
|
+
- Only deletes paths under .draft/
|
|
89
|
+
- Uses `git worktree remove` + `git worktree prune` (not shutil)
|
|
90
|
+
- Never deletes worktrees for tickets in executing/verifying/needs_human
|
|
91
|
+
- Validates paths before deletion
|
|
92
|
+
- Creates audit events for deletions
|
|
93
|
+
- Verifies branch is actually merged via git before deletion
|
|
94
|
+
- Hard guard: refuses if worktree path equals main repo path
|
|
95
|
+
"""
|
|
96
|
+
|
|
97
|
+
LEGACY_DRAFT_DIR = ".draft" # Legacy
|
|
98
|
+
WORKTREES_DIR = ".draft/worktrees" # Legacy
|
|
99
|
+
EVIDENCE_DIR = ".draft/evidence" # Legacy
|
|
100
|
+
|
|
101
|
+
    def __init__(self, db: AsyncSession):
        """Initialize the service with an async database session.

        Args:
            db: SQLAlchemy AsyncSession used for ticket/workspace lookups
                and for recording cleanup audit events.
        """
        self.db = db
|
|
103
|
+
|
|
104
|
+
def _detect_default_branch(self, repo_path: Path) -> str:
|
|
105
|
+
"""Detect the default branch of the repository.
|
|
106
|
+
|
|
107
|
+
Args:
|
|
108
|
+
repo_path: Path to the repository
|
|
109
|
+
|
|
110
|
+
Returns:
|
|
111
|
+
Name of the default branch (main, master, etc.)
|
|
112
|
+
"""
|
|
113
|
+
# Try origin/HEAD first
|
|
114
|
+
try:
|
|
115
|
+
result = subprocess.run(
|
|
116
|
+
["git", "symbolic-ref", "refs/remotes/origin/HEAD"],
|
|
117
|
+
cwd=repo_path,
|
|
118
|
+
capture_output=True,
|
|
119
|
+
text=True,
|
|
120
|
+
timeout=10,
|
|
121
|
+
)
|
|
122
|
+
if result.returncode == 0:
|
|
123
|
+
return result.stdout.strip().split("/")[-1]
|
|
124
|
+
except Exception:
|
|
125
|
+
pass
|
|
126
|
+
|
|
127
|
+
# Check if 'main' exists locally
|
|
128
|
+
result = subprocess.run(
|
|
129
|
+
["git", "rev-parse", "--verify", "refs/heads/main"],
|
|
130
|
+
cwd=repo_path,
|
|
131
|
+
capture_output=True,
|
|
132
|
+
timeout=10,
|
|
133
|
+
)
|
|
134
|
+
if result.returncode == 0:
|
|
135
|
+
return "main"
|
|
136
|
+
|
|
137
|
+
return "master"
|
|
138
|
+
|
|
139
|
+
def _ref_exists(self, ref: str, repo_path: Path) -> bool:
|
|
140
|
+
"""Check if a git ref exists.
|
|
141
|
+
|
|
142
|
+
Args:
|
|
143
|
+
ref: Full ref path (e.g., refs/heads/main)
|
|
144
|
+
repo_path: Path to the repository
|
|
145
|
+
|
|
146
|
+
Returns:
|
|
147
|
+
True if ref exists
|
|
148
|
+
"""
|
|
149
|
+
try:
|
|
150
|
+
result = subprocess.run(
|
|
151
|
+
["git", "rev-parse", "--verify", ref],
|
|
152
|
+
cwd=repo_path,
|
|
153
|
+
capture_output=True,
|
|
154
|
+
timeout=10,
|
|
155
|
+
)
|
|
156
|
+
return result.returncode == 0
|
|
157
|
+
except Exception:
|
|
158
|
+
return False
|
|
159
|
+
|
|
160
|
+
def _is_branch_ancestor_of(
|
|
161
|
+
self, branch_name: str, target_branch: str, repo_path: Path
|
|
162
|
+
) -> tuple[bool, str]:
|
|
163
|
+
"""Check if branch is an ancestor of target branch using git merge-base.
|
|
164
|
+
|
|
165
|
+
Uses explicit refs (refs/heads/<branch>) to avoid ambiguity with tags,
|
|
166
|
+
remote refs, or detached HEAD states.
|
|
167
|
+
|
|
168
|
+
Args:
|
|
169
|
+
branch_name: The branch to check (e.g., feature branch)
|
|
170
|
+
target_branch: The branch to check against (e.g., main)
|
|
171
|
+
repo_path: Path to the repository
|
|
172
|
+
|
|
173
|
+
Returns:
|
|
174
|
+
Tuple of (is_ancestor, reason_if_not)
|
|
175
|
+
"""
|
|
176
|
+
feature_ref = f"refs/heads/{branch_name}"
|
|
177
|
+
target_ref = f"refs/heads/{target_branch}"
|
|
178
|
+
|
|
179
|
+
# Verify both refs exist before checking ancestry
|
|
180
|
+
if not self._ref_exists(feature_ref, repo_path):
|
|
181
|
+
return False, f"Branch ref {feature_ref} does not exist"
|
|
182
|
+
|
|
183
|
+
if not self._ref_exists(target_ref, repo_path):
|
|
184
|
+
return False, f"Target branch ref {target_ref} does not exist"
|
|
185
|
+
|
|
186
|
+
try:
|
|
187
|
+
result = subprocess.run(
|
|
188
|
+
["git", "merge-base", "--is-ancestor", feature_ref, target_ref],
|
|
189
|
+
cwd=repo_path,
|
|
190
|
+
capture_output=True,
|
|
191
|
+
timeout=30,
|
|
192
|
+
)
|
|
193
|
+
if result.returncode == 0:
|
|
194
|
+
return True, ""
|
|
195
|
+
else:
|
|
196
|
+
return False, f"{feature_ref} is not an ancestor of {target_ref}"
|
|
197
|
+
except Exception as e:
|
|
198
|
+
return False, f"merge-base check failed: {e}"
|
|
199
|
+
|
|
200
|
+
    def _is_registered_worktree(self, path: Path, repo_path: Path) -> bool:
        """Check if a path is registered as a git worktree.

        Uses proper Path resolution and comparison (not string matching)
        to handle trailing slashes, symlinks, and relative paths.

        Safety: Only considers paths under the central worktrees root or
        the legacy .draft/worktrees/ directory as valid worktrees for this
        check; anything else is rejected without consulting git.

        Failure modes are asymmetric on purpose: if git itself cannot be
        queried, the path is assumed registered (True) so callers err on
        the side of NOT deleting; if the path fails the directory
        validation, the answer is False.

        Args:
            path: Path to check
            repo_path: Repository root

        Returns:
            True if path is listed in `git worktree list`
        """
        try:
            # Resolve and normalize the path we're checking
            check_path = path.resolve()

            # Safety: Validate check_path is under central dir or legacy .draft/worktrees/
            central_root = get_worktrees_root().resolve()
            legacy_worktrees = (repo_path / self.WORKTREES_DIR).resolve()
            in_valid_dir = False
            try:
                # relative_to raises ValueError when check_path is outside
                check_path.relative_to(central_root)
                in_valid_dir = True
            except ValueError:
                pass
            if not in_valid_dir:
                try:
                    check_path.relative_to(legacy_worktrees)
                    in_valid_dir = True
                except ValueError:
                    pass
            if not in_valid_dir:
                logger.warning(
                    f"Path {check_path} is not under {central_root} or {legacy_worktrees}, "
                    f"not checking worktree registration"
                )
                return False

            result = subprocess.run(
                ["git", "worktree", "list", "--porcelain"],
                cwd=repo_path,
                capture_output=True,
                text=True,
                timeout=30,
            )
            if result.returncode != 0:
                return True  # Assume registered if we can't check (safer)

            # Parse porcelain output - each worktree block starts with "worktree <path>"
            for line in result.stdout.splitlines():
                if line.startswith("worktree "):
                    # Extract path after "worktree " prefix
                    registered_path_str = line[9:].strip()
                    # Resolve to handle symlinks, trailing slashes, etc.
                    try:
                        registered_path = Path(registered_path_str).resolve()
                        if registered_path == check_path:
                            return True
                    except (OSError, ValueError):
                        # Invalid path in worktree list - skip
                        continue
            return False
        except Exception as e:
            logger.warning(f"Failed to check worktree registration for {path}: {e}")
            return True  # Assume registered if check fails (safer)
|
|
269
|
+
|
|
270
|
+
async def delete_worktree(
|
|
271
|
+
self,
|
|
272
|
+
workspace: Workspace,
|
|
273
|
+
ticket_id: str,
|
|
274
|
+
actor_id: str = "cleanup_service",
|
|
275
|
+
force: bool = False,
|
|
276
|
+
delete_branch: bool = False,
|
|
277
|
+
) -> bool:
|
|
278
|
+
"""Delete a single worktree using git worktree remove.
|
|
279
|
+
|
|
280
|
+
Steps:
|
|
281
|
+
1. Validate path is under .draft/worktrees/
|
|
282
|
+
2. Run `git worktree remove --force <path>`
|
|
283
|
+
3. Run `git worktree prune` to clean up stale entries
|
|
284
|
+
4. Delete branch ONLY if merge succeeded or delete_branch=True
|
|
285
|
+
5. Mark workspace as cleaned up in DB
|
|
286
|
+
6. Create cleanup event
|
|
287
|
+
|
|
288
|
+
Args:
|
|
289
|
+
workspace: The workspace to delete
|
|
290
|
+
ticket_id: The ticket ID
|
|
291
|
+
actor_id: Actor ID for event
|
|
292
|
+
force: If True, skip ticket state check (use with caution)
|
|
293
|
+
delete_branch: If True, force-delete the branch even if not merged
|
|
294
|
+
|
|
295
|
+
Returns:
|
|
296
|
+
True if deletion succeeded
|
|
297
|
+
"""
|
|
298
|
+
repo_path = WorkspaceService.get_repo_path()
|
|
299
|
+
worktree_path = Path(workspace.worktree_path)
|
|
300
|
+
|
|
301
|
+
# Resolve paths canonically for consistent comparison
|
|
302
|
+
resolved_worktree = worktree_path.resolve()
|
|
303
|
+
resolved_repo = repo_path.resolve()
|
|
304
|
+
resolved_legacy = (repo_path / self.WORKTREES_DIR).resolve()
|
|
305
|
+
|
|
306
|
+
# HARD GUARD: Never allow deletion of the main repo itself
|
|
307
|
+
# Even if symlink weirdness makes it appear under .draft/worktrees
|
|
308
|
+
# Check 1: worktree equals repo
|
|
309
|
+
# Check 2 (belt-and-suspenders): repo is under worktree (worktree is parent of repo)
|
|
310
|
+
worktree_is_repo = resolved_worktree == resolved_repo
|
|
311
|
+
repo_is_under_worktree = False
|
|
312
|
+
try:
|
|
313
|
+
# If repo is relative to worktree, then worktree is a parent of repo
|
|
314
|
+
# This should NEVER be true - if it is, something is very wrong
|
|
315
|
+
repo_is_under_worktree = resolved_repo.is_relative_to(resolved_worktree)
|
|
316
|
+
except (ValueError, TypeError):
|
|
317
|
+
pass # Not relative, which is expected
|
|
318
|
+
|
|
319
|
+
if worktree_is_repo or repo_is_under_worktree:
|
|
320
|
+
failure_reason = (
|
|
321
|
+
"worktree path equals main repo path"
|
|
322
|
+
if worktree_is_repo
|
|
323
|
+
else "worktree path is parent of main repo (would delete repo)"
|
|
324
|
+
)
|
|
325
|
+
logger.critical(
|
|
326
|
+
f"CRITICAL: Refusing to delete! worktree={resolved_worktree}, "
|
|
327
|
+
f"repo={resolved_repo}, equals={worktree_is_repo}, "
|
|
328
|
+
f"repo_under_worktree={repo_is_under_worktree}"
|
|
329
|
+
)
|
|
330
|
+
event = TicketEvent(
|
|
331
|
+
ticket_id=ticket_id,
|
|
332
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
333
|
+
from_state=None,
|
|
334
|
+
to_state=None,
|
|
335
|
+
actor_type=ActorType.SYSTEM.value,
|
|
336
|
+
actor_id=actor_id,
|
|
337
|
+
reason=f"CRITICAL: Worktree cleanup BLOCKED - {failure_reason}",
|
|
338
|
+
payload_json=json.dumps(
|
|
339
|
+
{
|
|
340
|
+
"worktree_path": str(worktree_path),
|
|
341
|
+
"resolved_worktree": str(resolved_worktree),
|
|
342
|
+
"resolved_repo": str(resolved_repo),
|
|
343
|
+
"cleanup_failed": True,
|
|
344
|
+
"failure_reason": f"CRITICAL: {failure_reason}",
|
|
345
|
+
"branch_name": workspace.branch_name,
|
|
346
|
+
"worktree_equals_repo": worktree_is_repo,
|
|
347
|
+
"repo_is_under_worktree": repo_is_under_worktree,
|
|
348
|
+
}
|
|
349
|
+
),
|
|
350
|
+
)
|
|
351
|
+
self.db.add(event)
|
|
352
|
+
await self.db.flush()
|
|
353
|
+
return False
|
|
354
|
+
|
|
355
|
+
# Safety: validate path is under central data dir or legacy .draft/worktrees/
|
|
356
|
+
resolved_central = get_worktrees_root().resolve()
|
|
357
|
+
in_central = False
|
|
358
|
+
in_legacy = False
|
|
359
|
+
try:
|
|
360
|
+
resolved_worktree.relative_to(resolved_central)
|
|
361
|
+
in_central = True
|
|
362
|
+
except ValueError:
|
|
363
|
+
pass
|
|
364
|
+
try:
|
|
365
|
+
resolved_worktree.relative_to(resolved_legacy)
|
|
366
|
+
in_legacy = True
|
|
367
|
+
except ValueError:
|
|
368
|
+
pass
|
|
369
|
+
|
|
370
|
+
if not in_central and not in_legacy:
|
|
371
|
+
logger.error(
|
|
372
|
+
f"Refusing to delete worktree not under data dir or {self.WORKTREES_DIR}: {worktree_path}"
|
|
373
|
+
)
|
|
374
|
+
event = TicketEvent(
|
|
375
|
+
ticket_id=ticket_id,
|
|
376
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
377
|
+
from_state=None,
|
|
378
|
+
to_state=None,
|
|
379
|
+
actor_type=ActorType.SYSTEM.value,
|
|
380
|
+
actor_id=actor_id,
|
|
381
|
+
reason=f"Worktree cleanup REFUSED: path not under data dir or {self.WORKTREES_DIR}",
|
|
382
|
+
payload_json=json.dumps(
|
|
383
|
+
{
|
|
384
|
+
"worktree_path": str(worktree_path),
|
|
385
|
+
"cleanup_failed": True,
|
|
386
|
+
"failure_reason": "Path validation failed: not under data dir or legacy dir",
|
|
387
|
+
"branch_name": workspace.branch_name,
|
|
388
|
+
}
|
|
389
|
+
),
|
|
390
|
+
)
|
|
391
|
+
self.db.add(event)
|
|
392
|
+
await self.db.flush()
|
|
393
|
+
return False
|
|
394
|
+
|
|
395
|
+
# Check ticket state (unless force=True)
|
|
396
|
+
ticket = None
|
|
397
|
+
if not force:
|
|
398
|
+
ticket_result = await self.db.execute(
|
|
399
|
+
select(Ticket)
|
|
400
|
+
.where(Ticket.id == ticket_id)
|
|
401
|
+
.options(selectinload(Ticket.events))
|
|
402
|
+
)
|
|
403
|
+
ticket = ticket_result.scalar_one_or_none()
|
|
404
|
+
if ticket and ticket.state in PROTECTED_TICKET_STATES:
|
|
405
|
+
logger.warning(
|
|
406
|
+
f"Refusing to delete worktree for ticket {ticket_id} in state {ticket.state}"
|
|
407
|
+
)
|
|
408
|
+
event = TicketEvent(
|
|
409
|
+
ticket_id=ticket_id,
|
|
410
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
411
|
+
from_state=None,
|
|
412
|
+
to_state=None,
|
|
413
|
+
actor_type=ActorType.SYSTEM.value,
|
|
414
|
+
actor_id=actor_id,
|
|
415
|
+
reason=f"Worktree cleanup REFUSED: ticket in protected state {ticket.state}",
|
|
416
|
+
payload_json=json.dumps(
|
|
417
|
+
{
|
|
418
|
+
"worktree_path": str(worktree_path),
|
|
419
|
+
"cleanup_failed": True,
|
|
420
|
+
"failure_reason": f"Ticket in protected state: {ticket.state}",
|
|
421
|
+
"branch_name": workspace.branch_name,
|
|
422
|
+
}
|
|
423
|
+
),
|
|
424
|
+
)
|
|
425
|
+
self.db.add(event)
|
|
426
|
+
await self.db.flush()
|
|
427
|
+
return False
|
|
428
|
+
|
|
429
|
+
# Check for merge events and extract base_branch from payload
|
|
430
|
+
# Look at both MERGE_SUCCEEDED and MERGE_REQUESTED to find base_branch
|
|
431
|
+
# This ensures we can verify even if events were pruned or merge failed
|
|
432
|
+
branch_merged = False
|
|
433
|
+
merge_base_branch: str | None = None
|
|
434
|
+
|
|
435
|
+
if not ticket:
|
|
436
|
+
# Fetch ticket if not already loaded
|
|
437
|
+
ticket_result = await self.db.execute(
|
|
438
|
+
select(Ticket)
|
|
439
|
+
.where(Ticket.id == ticket_id)
|
|
440
|
+
.options(selectinload(Ticket.events))
|
|
441
|
+
)
|
|
442
|
+
ticket = ticket_result.scalar_one_or_none()
|
|
443
|
+
|
|
444
|
+
if ticket:
|
|
445
|
+
# First pass: look for MERGE_SUCCEEDED (definitive)
|
|
446
|
+
for event in ticket.events:
|
|
447
|
+
if event.event_type == MERGE_SUCCEEDED_EVENT:
|
|
448
|
+
branch_merged = True
|
|
449
|
+
try:
|
|
450
|
+
payload = (
|
|
451
|
+
json.loads(event.payload_json) if event.payload_json else {}
|
|
452
|
+
)
|
|
453
|
+
merge_base_branch = payload.get("base_branch")
|
|
454
|
+
except (json.JSONDecodeError, TypeError, AttributeError):
|
|
455
|
+
pass # Invalid JSON - continue without base_branch
|
|
456
|
+
break
|
|
457
|
+
|
|
458
|
+
# Second pass: if no base_branch yet, look in MERGE_REQUESTED
|
|
459
|
+
if not merge_base_branch:
|
|
460
|
+
for event in ticket.events:
|
|
461
|
+
if event.event_type == MERGE_REQUESTED_EVENT:
|
|
462
|
+
try:
|
|
463
|
+
payload = (
|
|
464
|
+
json.loads(event.payload_json)
|
|
465
|
+
if event.payload_json
|
|
466
|
+
else {}
|
|
467
|
+
)
|
|
468
|
+
merge_base_branch = payload.get("base_branch")
|
|
469
|
+
if merge_base_branch:
|
|
470
|
+
break # Found it
|
|
471
|
+
except (json.JSONDecodeError, TypeError, AttributeError):
|
|
472
|
+
pass # Invalid JSON - continue
|
|
473
|
+
|
|
474
|
+
# =====================================================================
|
|
475
|
+
# STEP 1: DECIDE branch deletion BEFORE worktree removal
|
|
476
|
+
# This ensures we have full git context for the ancestry check
|
|
477
|
+
# =====================================================================
|
|
478
|
+
should_delete_branch = False
|
|
479
|
+
branch_skip_reason = None
|
|
480
|
+
git_verification_reason = None
|
|
481
|
+
used_base_branch: str | None = None
|
|
482
|
+
|
|
483
|
+
if delete_branch:
|
|
484
|
+
# Force deletion requested - skip safety checks
|
|
485
|
+
should_delete_branch = True
|
|
486
|
+
logger.info(
|
|
487
|
+
f"Will force-delete branch {workspace.branch_name} (delete_branch=True)"
|
|
488
|
+
)
|
|
489
|
+
elif branch_merged:
|
|
490
|
+
# Event says merged - verify with git using explicit refs
|
|
491
|
+
# PREFER the base_branch from merge event (consistency), fallback to detection
|
|
492
|
+
if merge_base_branch:
|
|
493
|
+
used_base_branch = merge_base_branch
|
|
494
|
+
logger.info(f"Using base_branch from merge event: {used_base_branch}")
|
|
495
|
+
else:
|
|
496
|
+
used_base_branch = self._detect_default_branch(repo_path)
|
|
497
|
+
logger.warning(
|
|
498
|
+
f"Merge event missing base_branch, falling back to detection: {used_base_branch}"
|
|
499
|
+
)
|
|
500
|
+
|
|
501
|
+
git_verified, git_verification_reason = self._is_branch_ancestor_of(
|
|
502
|
+
workspace.branch_name, used_base_branch, repo_path
|
|
503
|
+
)
|
|
504
|
+
|
|
505
|
+
if git_verified:
|
|
506
|
+
should_delete_branch = True
|
|
507
|
+
logger.info(
|
|
508
|
+
f"Branch {workspace.branch_name} verified as ancestor of {used_base_branch}"
|
|
509
|
+
)
|
|
510
|
+
else:
|
|
511
|
+
# Event says merged but git disagrees - DO NOT DELETE
|
|
512
|
+
branch_skip_reason = f"Event claims merged but git verification failed: {git_verification_reason}"
|
|
513
|
+
logger.warning(
|
|
514
|
+
f"NOT deleting branch {workspace.branch_name}: {branch_skip_reason}"
|
|
515
|
+
)
|
|
516
|
+
else:
|
|
517
|
+
branch_skip_reason = "No merge event found"
|
|
518
|
+
logger.info(f"Keeping branch {workspace.branch_name}: {branch_skip_reason}")
|
|
519
|
+
|
|
520
|
+
try:
|
|
521
|
+
# =====================================================================
|
|
522
|
+
# STEP 2: Remove worktree via git
|
|
523
|
+
# =====================================================================
|
|
524
|
+
worktree_removed = False
|
|
525
|
+
worktree_remove_error: str | None = None
|
|
526
|
+
still_registered = False
|
|
527
|
+
|
|
528
|
+
if worktree_path.exists():
|
|
529
|
+
result = subprocess.run(
|
|
530
|
+
["git", "worktree", "remove", "--force", str(worktree_path)],
|
|
531
|
+
cwd=repo_path,
|
|
532
|
+
capture_output=True,
|
|
533
|
+
text=True,
|
|
534
|
+
timeout=60,
|
|
535
|
+
)
|
|
536
|
+
|
|
537
|
+
if result.returncode == 0:
|
|
538
|
+
worktree_removed = True
|
|
539
|
+
else:
|
|
540
|
+
worktree_remove_error = result.stderr.strip()
|
|
541
|
+
logger.warning(
|
|
542
|
+
f"git worktree remove failed for {worktree_path}: {worktree_remove_error}"
|
|
543
|
+
)
|
|
544
|
+
# SAFE FALLBACK: Only rmtree if NOT registered as worktree
|
|
545
|
+
# This prevents corrupting git state
|
|
546
|
+
if worktree_path.exists():
|
|
547
|
+
still_registered = self._is_registered_worktree(
|
|
548
|
+
worktree_path, repo_path
|
|
549
|
+
)
|
|
550
|
+
# Get worktree list for debugging
|
|
551
|
+
worktree_list_result = subprocess.run(
|
|
552
|
+
["git", "worktree", "list"],
|
|
553
|
+
cwd=repo_path,
|
|
554
|
+
capture_output=True,
|
|
555
|
+
text=True,
|
|
556
|
+
timeout=10,
|
|
557
|
+
)
|
|
558
|
+
worktree_list_excerpt = (
|
|
559
|
+
worktree_list_result.stdout[:500]
|
|
560
|
+
if worktree_list_result.returncode == 0
|
|
561
|
+
else None
|
|
562
|
+
)
|
|
563
|
+
|
|
564
|
+
if still_registered:
|
|
565
|
+
logger.error(
|
|
566
|
+
f"Path {worktree_path} is still registered as worktree, "
|
|
567
|
+
f"refusing to rmtree (would corrupt git state)"
|
|
568
|
+
)
|
|
569
|
+
# Don't return early - emit failure event first
|
|
570
|
+
else:
|
|
571
|
+
# Not registered - safe to remove directory
|
|
572
|
+
logger.info(
|
|
573
|
+
f"Path {worktree_path} not registered as worktree, "
|
|
574
|
+
f"safe to remove directory"
|
|
575
|
+
)
|
|
576
|
+
shutil.rmtree(worktree_path)
|
|
577
|
+
worktree_removed = True
|
|
578
|
+
else:
|
|
579
|
+
worktree_removed = True # Already gone
|
|
580
|
+
worktree_list_excerpt = None
|
|
581
|
+
|
|
582
|
+
# If worktree is still registered, handle based on force flag
|
|
583
|
+
if still_registered and not worktree_removed:
|
|
584
|
+
failure_payload = {
|
|
585
|
+
"worktree_path": str(worktree_path),
|
|
586
|
+
"worktree_removed": False,
|
|
587
|
+
"cleanup_failed": True,
|
|
588
|
+
"failure_reason": "Worktree still registered, cannot safely remove",
|
|
589
|
+
"git_worktree_remove_stderr": _sanitize_output(
|
|
590
|
+
worktree_remove_error
|
|
591
|
+
),
|
|
592
|
+
"git_worktree_list_excerpt": _sanitize_output(
|
|
593
|
+
worktree_list_excerpt
|
|
594
|
+
),
|
|
595
|
+
"branch_name": workspace.branch_name,
|
|
596
|
+
"branch_was_merged": branch_merged,
|
|
597
|
+
"force_used": force,
|
|
598
|
+
"still_registered": True,
|
|
599
|
+
}
|
|
600
|
+
|
|
601
|
+
if not force:
|
|
602
|
+
# Not forcing - emit failure event and return
|
|
603
|
+
event = TicketEvent(
|
|
604
|
+
ticket_id=ticket_id,
|
|
605
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
606
|
+
from_state=None,
|
|
607
|
+
to_state=None,
|
|
608
|
+
actor_type=ActorType.SYSTEM.value,
|
|
609
|
+
actor_id=actor_id,
|
|
610
|
+
reason=f"Worktree cleanup FAILED: {worktree_path} (still registered)",
|
|
611
|
+
payload_json=json.dumps(failure_payload),
|
|
612
|
+
)
|
|
613
|
+
self.db.add(event)
|
|
614
|
+
await self.db.flush()
|
|
615
|
+
return False
|
|
616
|
+
else:
|
|
617
|
+
# force=True but still registered = FAILURE state
|
|
618
|
+
# We emit a failure event and return False (don't set cleaned_up_at)
|
|
619
|
+
# This keeps DB state honest: cleanup did not actually succeed
|
|
620
|
+
logger.warning(
|
|
621
|
+
f"Force=True but worktree {worktree_path} still registered. "
|
|
622
|
+
f"Cannot safely proceed - returning failure."
|
|
623
|
+
)
|
|
624
|
+
event = TicketEvent(
|
|
625
|
+
ticket_id=ticket_id,
|
|
626
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
627
|
+
from_state=None,
|
|
628
|
+
to_state=None,
|
|
629
|
+
actor_type=ActorType.SYSTEM.value,
|
|
630
|
+
actor_id=actor_id,
|
|
631
|
+
reason=f"Worktree cleanup FAILED: {worktree_path} still registered (force=True cannot override)",
|
|
632
|
+
payload_json=json.dumps(failure_payload),
|
|
633
|
+
)
|
|
634
|
+
self.db.add(event)
|
|
635
|
+
await self.db.flush()
|
|
636
|
+
# Return False - cleanup did NOT succeed
|
|
637
|
+
# workspace.cleaned_up_at remains NULL
|
|
638
|
+
return False
|
|
639
|
+
|
|
640
|
+
# =====================================================================
|
|
641
|
+
# STEP 3: Prune stale worktree entries
|
|
642
|
+
# =====================================================================
|
|
643
|
+
subprocess.run(
|
|
644
|
+
["git", "worktree", "prune"],
|
|
645
|
+
cwd=repo_path,
|
|
646
|
+
capture_output=True,
|
|
647
|
+
timeout=30,
|
|
648
|
+
)
|
|
649
|
+
|
|
650
|
+
# =====================================================================
|
|
651
|
+
# STEP 4: Delete branch (decision was made in Step 1)
|
|
652
|
+
# Branch deletion failure is NON-FATAL - cleanup continues
|
|
653
|
+
# =====================================================================
|
|
654
|
+
branch_deleted = False
|
|
655
|
+
branch_delete_error = None
|
|
656
|
+
|
|
657
|
+
if should_delete_branch:
|
|
658
|
+
# Use -D for force, -d for safe (safe -d can fail if not merged, that's ok)
|
|
659
|
+
delete_flag = "-D" if delete_branch else "-d"
|
|
660
|
+
result = subprocess.run(
|
|
661
|
+
["git", "branch", delete_flag, workspace.branch_name],
|
|
662
|
+
cwd=repo_path,
|
|
663
|
+
capture_output=True,
|
|
664
|
+
text=True,
|
|
665
|
+
timeout=30,
|
|
666
|
+
)
|
|
667
|
+
branch_deleted = result.returncode == 0
|
|
668
|
+
if not branch_deleted:
|
|
669
|
+
branch_delete_error = result.stderr.strip()
|
|
670
|
+
# This is NON-FATAL - log but don't fail cleanup
|
|
671
|
+
logger.warning(
|
|
672
|
+
f"Branch deletion failed (non-fatal): {workspace.branch_name}: "
|
|
673
|
+
f"{branch_delete_error}"
|
|
674
|
+
)
|
|
675
|
+
|
|
676
|
+
# =====================================================================
|
|
677
|
+
# STEP 5: Build cleanup event payload (always - for observability)
|
|
678
|
+
# =====================================================================
|
|
679
|
+
payload = {
|
|
680
|
+
"worktree_path": str(worktree_path),
|
|
681
|
+
"worktree_removed": worktree_removed,
|
|
682
|
+
"branch_name": workspace.branch_name,
|
|
683
|
+
# Distinguish between skip vs failure:
|
|
684
|
+
# - branch_delete_attempted: True if we tried to delete, False if skipped
|
|
685
|
+
# - branch_deleted: True only if deletion succeeded
|
|
686
|
+
# - branch_delete_error: Set only if attempted and failed
|
|
687
|
+
"branch_delete_attempted": should_delete_branch,
|
|
688
|
+
"branch_deleted": branch_deleted,
|
|
689
|
+
"branch_was_merged": branch_merged,
|
|
690
|
+
}
|
|
691
|
+
if branch_skip_reason:
|
|
692
|
+
payload["branch_skip_reason"] = branch_skip_reason
|
|
693
|
+
if branch_delete_error:
|
|
694
|
+
payload["branch_delete_error"] = _sanitize_output(branch_delete_error)
|
|
695
|
+
if git_verification_reason and not should_delete_branch:
|
|
696
|
+
payload["git_verification_failed"] = git_verification_reason
|
|
697
|
+
if used_base_branch:
|
|
698
|
+
payload["base_branch_used"] = used_base_branch
|
|
699
|
+
|
|
700
|
+
# =====================================================================
|
|
701
|
+
# STEP 6: Only mark cleanup successful if worktree was actually removed
|
|
702
|
+
# =====================================================================
|
|
703
|
+
if not worktree_removed:
|
|
704
|
+
# Worktree not removed - emit failure event and return False
|
|
705
|
+
logger.error(
|
|
706
|
+
f"Cleanup FAILED for {worktree_path}: worktree was not removed"
|
|
707
|
+
)
|
|
708
|
+
payload["cleanup_failed"] = True
|
|
709
|
+
payload["failure_reason"] = "Worktree was not removed"
|
|
710
|
+
|
|
711
|
+
event = TicketEvent(
|
|
712
|
+
ticket_id=ticket_id,
|
|
713
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
714
|
+
from_state=None,
|
|
715
|
+
to_state=None,
|
|
716
|
+
actor_type=ActorType.SYSTEM.value,
|
|
717
|
+
actor_id=actor_id,
|
|
718
|
+
reason=f"Worktree cleanup FAILED: {worktree_path}",
|
|
719
|
+
payload_json=json.dumps(payload),
|
|
720
|
+
)
|
|
721
|
+
self.db.add(event)
|
|
722
|
+
await self.db.flush()
|
|
723
|
+
# Do NOT set cleaned_up_at - cleanup didn't succeed
|
|
724
|
+
return False
|
|
725
|
+
|
|
726
|
+
# Worktree was removed - mark as cleaned up
|
|
727
|
+
workspace.cleaned_up_at = datetime.now(UTC)
|
|
728
|
+
await self.db.flush()
|
|
729
|
+
|
|
730
|
+
event = TicketEvent(
|
|
731
|
+
ticket_id=ticket_id,
|
|
732
|
+
event_type=EventType.WORKTREE_CLEANED.value,
|
|
733
|
+
from_state=None,
|
|
734
|
+
to_state=None,
|
|
735
|
+
actor_type=ActorType.SYSTEM.value,
|
|
736
|
+
actor_id=actor_id,
|
|
737
|
+
reason=f"Worktree cleaned up: {worktree_path}",
|
|
738
|
+
payload_json=json.dumps(payload),
|
|
739
|
+
)
|
|
740
|
+
self.db.add(event)
|
|
741
|
+
await self.db.flush()
|
|
742
|
+
|
|
743
|
+
logger.info(f"Deleted worktree {worktree_path} for ticket {ticket_id}")
|
|
744
|
+
return True
|
|
745
|
+
|
|
746
|
+
except Exception as e:
|
|
747
|
+
logger.exception(f"Failed to delete worktree {worktree_path}: {e}")
|
|
748
|
+
# Emit failure audit event for exception
|
|
749
|
+
try:
|
|
750
|
+
event = TicketEvent(
|
|
751
|
+
ticket_id=ticket_id,
|
|
752
|
+
event_type=EventType.WORKTREE_CLEANUP_FAILED.value,
|
|
753
|
+
from_state=None,
|
|
754
|
+
to_state=None,
|
|
755
|
+
actor_type=ActorType.SYSTEM.value,
|
|
756
|
+
actor_id=actor_id,
|
|
757
|
+
reason=f"Worktree cleanup EXCEPTION: {worktree_path}",
|
|
758
|
+
payload_json=json.dumps(
|
|
759
|
+
{
|
|
760
|
+
"worktree_path": str(worktree_path),
|
|
761
|
+
"cleanup_failed": True,
|
|
762
|
+
"failure_reason": f"Exception: {_sanitize_output(str(e))}",
|
|
763
|
+
"exception_type": type(e).__name__,
|
|
764
|
+
"branch_name": workspace.branch_name,
|
|
765
|
+
}
|
|
766
|
+
),
|
|
767
|
+
)
|
|
768
|
+
self.db.add(event)
|
|
769
|
+
await self.db.flush()
|
|
770
|
+
except Exception as event_error:
|
|
771
|
+
# Don't let event emission failure mask the original error
|
|
772
|
+
logger.error(f"Failed to emit cleanup failure event: {event_error}")
|
|
773
|
+
return False
|
|
774
|
+
|
|
775
|
+
async def cleanup_stale_worktrees(
|
|
776
|
+
self,
|
|
777
|
+
dry_run: bool = True,
|
|
778
|
+
) -> CleanupResult:
|
|
779
|
+
"""Clean up stale worktrees that exceed TTL.
|
|
780
|
+
|
|
781
|
+
Only cleans worktrees for tickets in DONE or ABANDONED state,
|
|
782
|
+
or BLOCKED tickets older than TTL.
|
|
783
|
+
|
|
784
|
+
Args:
|
|
785
|
+
dry_run: If True, only report what would be deleted
|
|
786
|
+
|
|
787
|
+
Returns:
|
|
788
|
+
CleanupResult with counts and details
|
|
789
|
+
"""
|
|
790
|
+
result = CleanupResult()
|
|
791
|
+
cleanup_config = DraftConfig().cleanup_config
|
|
792
|
+
|
|
793
|
+
ttl_threshold = datetime.now(UTC) - timedelta(
|
|
794
|
+
days=cleanup_config.worktree_ttl_days
|
|
795
|
+
)
|
|
796
|
+
|
|
797
|
+
# Find stale workspaces with their tickets
|
|
798
|
+
query = (
|
|
799
|
+
select(Workspace)
|
|
800
|
+
.where(
|
|
801
|
+
Workspace.cleaned_up_at.is_(None),
|
|
802
|
+
Workspace.created_at < ttl_threshold,
|
|
803
|
+
)
|
|
804
|
+
.options(selectinload(Workspace.ticket))
|
|
805
|
+
)
|
|
806
|
+
stale_result = await self.db.execute(query)
|
|
807
|
+
stale_workspaces = list(stale_result.scalars().all())
|
|
808
|
+
|
|
809
|
+
for workspace in stale_workspaces:
|
|
810
|
+
ticket = workspace.ticket
|
|
811
|
+
worktree_path = Path(workspace.worktree_path)
|
|
812
|
+
|
|
813
|
+
# Check ticket state - only clean if in safe state
|
|
814
|
+
if ticket and ticket.state in PROTECTED_TICKET_STATES:
|
|
815
|
+
result.details.append(
|
|
816
|
+
f"[SKIPPED] Worktree {worktree_path} - ticket in {ticket.state} state"
|
|
817
|
+
)
|
|
818
|
+
result.worktrees_skipped += 1
|
|
819
|
+
continue
|
|
820
|
+
|
|
821
|
+
# Safe to delete: done, abandoned, or blocked older than TTL
|
|
822
|
+
result.details.append(
|
|
823
|
+
f"{'[DRY RUN] Would delete' if dry_run else 'Deleting'} "
|
|
824
|
+
f"stale worktree: {worktree_path} (created {workspace.created_at})"
|
|
825
|
+
)
|
|
826
|
+
|
|
827
|
+
if not dry_run:
|
|
828
|
+
success = await self.delete_worktree(
|
|
829
|
+
workspace=workspace,
|
|
830
|
+
ticket_id=workspace.ticket_id,
|
|
831
|
+
actor_id="cleanup_stale_worktrees",
|
|
832
|
+
force=True, # We already checked state above
|
|
833
|
+
)
|
|
834
|
+
if success:
|
|
835
|
+
result.worktrees_deleted += 1
|
|
836
|
+
else:
|
|
837
|
+
result.worktrees_failed += 1
|
|
838
|
+
else:
|
|
839
|
+
result.worktrees_deleted += 1 # Count as would-be-deleted for dry run
|
|
840
|
+
|
|
841
|
+
return result
|
|
842
|
+
|
|
843
|
+
async def cleanup_orphaned_worktrees(
|
|
844
|
+
self,
|
|
845
|
+
dry_run: bool = True,
|
|
846
|
+
) -> CleanupResult:
|
|
847
|
+
"""Clean up orphaned worktree directories not tracked in database.
|
|
848
|
+
|
|
849
|
+
Uses `git worktree remove` for directories that are git worktrees,
|
|
850
|
+
and falls back to directory removal for non-git directories.
|
|
851
|
+
|
|
852
|
+
Args:
|
|
853
|
+
dry_run: If True, only report what would be deleted
|
|
854
|
+
|
|
855
|
+
Returns:
|
|
856
|
+
CleanupResult with counts and details
|
|
857
|
+
"""
|
|
858
|
+
result = CleanupResult()
|
|
859
|
+
repo_path = WorkspaceService.get_repo_path()
|
|
860
|
+
|
|
861
|
+
# Get all tracked worktree paths
|
|
862
|
+
query = select(Workspace.worktree_path)
|
|
863
|
+
tracked_result = await self.db.execute(query)
|
|
864
|
+
tracked_paths = {Path(p).resolve() for p in tracked_result.scalars().all()}
|
|
865
|
+
|
|
866
|
+
# Scan both central data dir and legacy .draft/worktrees/
|
|
867
|
+
scan_dirs = []
|
|
868
|
+
central_worktrees = get_worktrees_root()
|
|
869
|
+
if central_worktrees.exists():
|
|
870
|
+
# Central dir has board-id subdirs, scan all of them
|
|
871
|
+
for board_dir in central_worktrees.iterdir():
|
|
872
|
+
if board_dir.is_dir():
|
|
873
|
+
scan_dirs.append(board_dir)
|
|
874
|
+
legacy_worktrees_dir = repo_path / self.WORKTREES_DIR
|
|
875
|
+
if legacy_worktrees_dir.exists():
|
|
876
|
+
scan_dirs.append(legacy_worktrees_dir)
|
|
877
|
+
|
|
878
|
+
for worktrees_dir in scan_dirs:
|
|
879
|
+
for entry in worktrees_dir.iterdir():
|
|
880
|
+
if not entry.is_dir():
|
|
881
|
+
continue
|
|
882
|
+
|
|
883
|
+
if entry.resolve() in tracked_paths:
|
|
884
|
+
continue
|
|
885
|
+
|
|
886
|
+
size = self._get_dir_size(entry)
|
|
887
|
+
result.details.append(
|
|
888
|
+
f"{'[DRY RUN] Would delete' if dry_run else 'Deleting'} "
|
|
889
|
+
f"orphaned worktree: {entry} ({size // 1024}KB)"
|
|
890
|
+
)
|
|
891
|
+
|
|
892
|
+
if not dry_run:
|
|
893
|
+
try:
|
|
894
|
+
# Try git worktree remove first
|
|
895
|
+
git_result = subprocess.run(
|
|
896
|
+
["git", "worktree", "remove", "--force", str(entry)],
|
|
897
|
+
cwd=repo_path,
|
|
898
|
+
capture_output=True,
|
|
899
|
+
text=True,
|
|
900
|
+
timeout=60,
|
|
901
|
+
)
|
|
902
|
+
|
|
903
|
+
if git_result.returncode != 0:
|
|
904
|
+
# Fallback to manual removal if git command fails
|
|
905
|
+
shutil.rmtree(entry)
|
|
906
|
+
|
|
907
|
+
# Always prune after removal
|
|
908
|
+
subprocess.run(
|
|
909
|
+
["git", "worktree", "prune"],
|
|
910
|
+
cwd=repo_path,
|
|
911
|
+
capture_output=True,
|
|
912
|
+
timeout=30,
|
|
913
|
+
)
|
|
914
|
+
|
|
915
|
+
result.worktrees_deleted += 1
|
|
916
|
+
result.bytes_freed += size
|
|
917
|
+
except Exception as e:
|
|
918
|
+
logger.error(f"Failed to delete orphaned worktree {entry}: {e}")
|
|
919
|
+
result.worktrees_failed += 1
|
|
920
|
+
else:
|
|
921
|
+
result.worktrees_deleted += 1
|
|
922
|
+
result.bytes_freed += size
|
|
923
|
+
|
|
924
|
+
return result
|
|
925
|
+
|
|
926
|
+
async def cleanup_old_evidence(
|
|
927
|
+
self,
|
|
928
|
+
dry_run: bool = True,
|
|
929
|
+
) -> CleanupResult:
|
|
930
|
+
"""Clean up evidence files older than TTL.
|
|
931
|
+
|
|
932
|
+
Args:
|
|
933
|
+
dry_run: If True, only report what would be deleted
|
|
934
|
+
|
|
935
|
+
Returns:
|
|
936
|
+
CleanupResult with counts and details
|
|
937
|
+
"""
|
|
938
|
+
result = CleanupResult()
|
|
939
|
+
cleanup_config = DraftConfig().cleanup_config
|
|
940
|
+
repo_path = WorkspaceService.get_repo_path()
|
|
941
|
+
|
|
942
|
+
ttl_threshold = datetime.now(UTC) - timedelta(
|
|
943
|
+
days=cleanup_config.evidence_ttl_days
|
|
944
|
+
)
|
|
945
|
+
|
|
946
|
+
# Find old evidence records
|
|
947
|
+
query = select(Evidence).where(Evidence.created_at < ttl_threshold)
|
|
948
|
+
old_result = await self.db.execute(query)
|
|
949
|
+
old_evidence = list(old_result.scalars().all())
|
|
950
|
+
|
|
951
|
+
for evidence in old_evidence:
|
|
952
|
+
# Delete stdout file
|
|
953
|
+
if evidence.stdout_path:
|
|
954
|
+
stdout_path = repo_path / evidence.stdout_path
|
|
955
|
+
if self._is_safe_path(stdout_path, repo_path):
|
|
956
|
+
size = stdout_path.stat().st_size if stdout_path.exists() else 0
|
|
957
|
+
result.details.append(
|
|
958
|
+
f"{'[DRY RUN] Would delete' if dry_run else 'Deleting'} "
|
|
959
|
+
f"evidence file: {stdout_path} ({size // 1024}KB)"
|
|
960
|
+
)
|
|
961
|
+
|
|
962
|
+
if not dry_run and stdout_path.exists():
|
|
963
|
+
try:
|
|
964
|
+
stdout_path.unlink()
|
|
965
|
+
result.evidence_files_deleted += 1
|
|
966
|
+
result.bytes_freed += size
|
|
967
|
+
except Exception as e:
|
|
968
|
+
logger.error(
|
|
969
|
+
f"Failed to delete evidence file {stdout_path}: {e}"
|
|
970
|
+
)
|
|
971
|
+
result.evidence_files_failed += 1
|
|
972
|
+
|
|
973
|
+
# Delete stderr file
|
|
974
|
+
if evidence.stderr_path:
|
|
975
|
+
stderr_path = repo_path / evidence.stderr_path
|
|
976
|
+
if self._is_safe_path(stderr_path, repo_path):
|
|
977
|
+
size = stderr_path.stat().st_size if stderr_path.exists() else 0
|
|
978
|
+
result.details.append(
|
|
979
|
+
f"{'[DRY RUN] Would delete' if dry_run else 'Deleting'} "
|
|
980
|
+
f"evidence file: {stderr_path}"
|
|
981
|
+
)
|
|
982
|
+
|
|
983
|
+
if not dry_run and stderr_path.exists():
|
|
984
|
+
try:
|
|
985
|
+
stderr_path.unlink()
|
|
986
|
+
result.evidence_files_deleted += 1
|
|
987
|
+
result.bytes_freed += size
|
|
988
|
+
except Exception as e:
|
|
989
|
+
logger.error(
|
|
990
|
+
f"Failed to delete evidence file {stderr_path}: {e}"
|
|
991
|
+
)
|
|
992
|
+
result.evidence_files_failed += 1
|
|
993
|
+
|
|
994
|
+
return result
|
|
995
|
+
|
|
996
|
+
async def run_full_cleanup(
|
|
997
|
+
self,
|
|
998
|
+
dry_run: bool = True,
|
|
999
|
+
delete_worktrees: bool = True,
|
|
1000
|
+
delete_evidence: bool = True,
|
|
1001
|
+
) -> CleanupResult:
|
|
1002
|
+
"""Run full cleanup of worktrees and evidence.
|
|
1003
|
+
|
|
1004
|
+
Args:
|
|
1005
|
+
dry_run: If True, only report what would be deleted
|
|
1006
|
+
delete_worktrees: Whether to delete stale worktrees
|
|
1007
|
+
delete_evidence: Whether to delete old evidence
|
|
1008
|
+
|
|
1009
|
+
Returns:
|
|
1010
|
+
Combined CleanupResult
|
|
1011
|
+
"""
|
|
1012
|
+
combined = CleanupResult()
|
|
1013
|
+
|
|
1014
|
+
if delete_worktrees:
|
|
1015
|
+
# Cleanup stale worktrees
|
|
1016
|
+
stale_result = await self.cleanup_stale_worktrees(dry_run=dry_run)
|
|
1017
|
+
combined.worktrees_deleted += stale_result.worktrees_deleted
|
|
1018
|
+
combined.worktrees_failed += stale_result.worktrees_failed
|
|
1019
|
+
combined.worktrees_skipped += stale_result.worktrees_skipped
|
|
1020
|
+
combined.bytes_freed += stale_result.bytes_freed
|
|
1021
|
+
combined.details.extend(stale_result.details)
|
|
1022
|
+
|
|
1023
|
+
# Cleanup orphaned worktrees
|
|
1024
|
+
orphan_result = await self.cleanup_orphaned_worktrees(dry_run=dry_run)
|
|
1025
|
+
combined.worktrees_deleted += orphan_result.worktrees_deleted
|
|
1026
|
+
combined.worktrees_failed += orphan_result.worktrees_failed
|
|
1027
|
+
combined.bytes_freed += orphan_result.bytes_freed
|
|
1028
|
+
combined.details.extend(orphan_result.details)
|
|
1029
|
+
|
|
1030
|
+
if delete_evidence:
|
|
1031
|
+
evidence_result = await self.cleanup_old_evidence(dry_run=dry_run)
|
|
1032
|
+
combined.evidence_files_deleted += evidence_result.evidence_files_deleted
|
|
1033
|
+
combined.evidence_files_failed += evidence_result.evidence_files_failed
|
|
1034
|
+
combined.bytes_freed += evidence_result.bytes_freed
|
|
1035
|
+
combined.details.extend(evidence_result.details)
|
|
1036
|
+
|
|
1037
|
+
if not dry_run:
|
|
1038
|
+
await self.db.commit()
|
|
1039
|
+
|
|
1040
|
+
return combined
|
|
1041
|
+
|
|
1042
|
+
def _is_safe_path(self, path: Path, repo_root: Path) -> bool:
|
|
1043
|
+
"""Check if a path is safe to delete (under central data dir or .draft/).
|
|
1044
|
+
|
|
1045
|
+
Args:
|
|
1046
|
+
path: Path to check
|
|
1047
|
+
repo_root: Repository root path
|
|
1048
|
+
|
|
1049
|
+
Returns:
|
|
1050
|
+
True if path is safe to delete
|
|
1051
|
+
"""
|
|
1052
|
+
try:
|
|
1053
|
+
resolved = path.resolve()
|
|
1054
|
+
# Check central data dir
|
|
1055
|
+
data_root = get_data_dir().resolve()
|
|
1056
|
+
try:
|
|
1057
|
+
common = os.path.commonpath([str(resolved), str(data_root)])
|
|
1058
|
+
if common == str(data_root):
|
|
1059
|
+
return True
|
|
1060
|
+
except ValueError:
|
|
1061
|
+
pass
|
|
1062
|
+
# Check legacy .draft/
|
|
1063
|
+
draft_root = (repo_root / self.LEGACY_DRAFT_DIR).resolve()
|
|
1064
|
+
common = os.path.commonpath([str(resolved), str(draft_root)])
|
|
1065
|
+
return common == str(draft_root)
|
|
1066
|
+
except (ValueError, OSError):
|
|
1067
|
+
return False
|
|
1068
|
+
|
|
1069
|
+
def _get_dir_size(self, path: Path) -> int:
|
|
1070
|
+
"""Get total size of a directory in bytes.
|
|
1071
|
+
|
|
1072
|
+
Args:
|
|
1073
|
+
path: Directory path
|
|
1074
|
+
|
|
1075
|
+
Returns:
|
|
1076
|
+
Total size in bytes
|
|
1077
|
+
"""
|
|
1078
|
+
total = 0
|
|
1079
|
+
try:
|
|
1080
|
+
for entry in path.rglob("*"):
|
|
1081
|
+
if entry.is_file():
|
|
1082
|
+
total += entry.stat().st_size
|
|
1083
|
+
except Exception:
|
|
1084
|
+
pass
|
|
1085
|
+
return total
|