draft-board 0.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app/backend/.env.example +9 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_patch.txt +195 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_stat.txt +6 -0
- package/app/backend/CURL_EXAMPLES.md +335 -0
- package/app/backend/ENV_SETUP.md +65 -0
- package/app/backend/alembic/env.py +71 -0
- package/app/backend/alembic/script.py.mako +28 -0
- package/app/backend/alembic/versions/001_initial_schema.py +104 -0
- package/app/backend/alembic/versions/002_add_jobs_table.py +52 -0
- package/app/backend/alembic/versions/003_add_workspace_table.py +48 -0
- package/app/backend/alembic/versions/004_add_evidence_table.py +56 -0
- package/app/backend/alembic/versions/005_add_verification_commands.py +32 -0
- package/app/backend/alembic/versions/006_add_planner_lock_table.py +39 -0
- package/app/backend/alembic/versions/007_add_revision_review_tables.py +126 -0
- package/app/backend/alembic/versions/008_add_revision_idempotency_and_traceability.py +52 -0
- package/app/backend/alembic/versions/009_add_job_health_fields.py +46 -0
- package/app/backend/alembic/versions/010_add_review_comment_line_content.py +36 -0
- package/app/backend/alembic/versions/011_add_analysis_cache.py +47 -0
- package/app/backend/alembic/versions/012_add_boards_table.py +102 -0
- package/app/backend/alembic/versions/013_add_ticket_blocking.py +45 -0
- package/app/backend/alembic/versions/014_add_agent_sessions.py +220 -0
- package/app/backend/alembic/versions/015_add_ticket_sort_order.py +33 -0
- package/app/backend/alembic/versions/03220f0b93ae_add_pr_fields_to_ticket.py +49 -0
- package/app/backend/alembic/versions/0c2d89fff3b1_seed_board_configs_from_yaml.py +206 -0
- package/app/backend/alembic/versions/3348e5cf54c1_add_merge_checklist_table.py +67 -0
- package/app/backend/alembic/versions/357c780ee445_add_goal_status.py +34 -0
- package/app/backend/alembic/versions/553340b7e26c_add_autonomy_fields_to_goal.py +65 -0
- package/app/backend/alembic/versions/774dc335c679_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/7b307e847cbd_merge_heads.py +23 -0
- package/app/backend/alembic/versions/82ecd978cc70_add_missing_indexes.py +48 -0
- package/app/backend/alembic/versions/8ef5054dc280_add_normalized_log_entries.py +173 -0
- package/app/backend/alembic/versions/8f3e2bd8ea3b_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/9d17f0698d3b_add_config_column_to_boards_table.py +30 -0
- package/app/backend/alembic/versions/add_agent_conversation_history.py +72 -0
- package/app/backend/alembic/versions/add_job_variant.py +34 -0
- package/app/backend/alembic/versions/add_performance_indexes.py +95 -0
- package/app/backend/alembic/versions/add_repos_and_board_repos.py +174 -0
- package/app/backend/alembic/versions/add_session_id_to_jobs.py +27 -0
- package/app/backend/alembic/versions/add_sqlite_backend_tables.py +104 -0
- package/app/backend/alembic/versions/b10fb0b62240_add_diff_content_to_revisions.py +34 -0
- package/app/backend/alembic.ini +89 -0
- package/app/backend/app/__init__.py +3 -0
- package/app/backend/app/data_dir.py +85 -0
- package/app/backend/app/database.py +70 -0
- package/app/backend/app/database_sync.py +64 -0
- package/app/backend/app/dependencies/__init__.py +5 -0
- package/app/backend/app/dependencies/auth.py +80 -0
- package/app/backend/app/dependencies.py +43 -0
- package/app/backend/app/exceptions.py +178 -0
- package/app/backend/app/executors/__init__.py +1 -0
- package/app/backend/app/executors/adapters/__init__.py +1 -0
- package/app/backend/app/executors/adapters/aider.py +152 -0
- package/app/backend/app/executors/adapters/amazon_q.py +103 -0
- package/app/backend/app/executors/adapters/amp.py +123 -0
- package/app/backend/app/executors/adapters/claude.py +177 -0
- package/app/backend/app/executors/adapters/cline.py +127 -0
- package/app/backend/app/executors/adapters/codex.py +167 -0
- package/app/backend/app/executors/adapters/copilot.py +202 -0
- package/app/backend/app/executors/adapters/cursor.py +87 -0
- package/app/backend/app/executors/adapters/droid.py +123 -0
- package/app/backend/app/executors/adapters/gemini.py +132 -0
- package/app/backend/app/executors/adapters/goose.py +131 -0
- package/app/backend/app/executors/adapters/opencode.py +123 -0
- package/app/backend/app/executors/adapters/qwen.py +123 -0
- package/app/backend/app/executors/plugins/__init__.py +1 -0
- package/app/backend/app/executors/registry.py +202 -0
- package/app/backend/app/executors/spec.py +226 -0
- package/app/backend/app/main.py +486 -0
- package/app/backend/app/middleware/__init__.py +13 -0
- package/app/backend/app/middleware/idempotency.py +426 -0
- package/app/backend/app/middleware/rate_limit.py +312 -0
- package/app/backend/app/middleware/security_headers.py +43 -0
- package/app/backend/app/middleware/timeout.py +37 -0
- package/app/backend/app/models/__init__.py +56 -0
- package/app/backend/app/models/agent_conversation_history.py +56 -0
- package/app/backend/app/models/agent_session.py +127 -0
- package/app/backend/app/models/analysis_cache.py +49 -0
- package/app/backend/app/models/base.py +9 -0
- package/app/backend/app/models/board.py +79 -0
- package/app/backend/app/models/board_repo.py +68 -0
- package/app/backend/app/models/cost_budget.py +42 -0
- package/app/backend/app/models/enums.py +40 -0
- package/app/backend/app/models/evidence.py +132 -0
- package/app/backend/app/models/goal.py +102 -0
- package/app/backend/app/models/idempotency_entry.py +30 -0
- package/app/backend/app/models/job.py +163 -0
- package/app/backend/app/models/job_queue.py +39 -0
- package/app/backend/app/models/kv_store.py +28 -0
- package/app/backend/app/models/merge_checklist.py +87 -0
- package/app/backend/app/models/normalized_log.py +100 -0
- package/app/backend/app/models/planner_lock.py +43 -0
- package/app/backend/app/models/rate_limit_entry.py +25 -0
- package/app/backend/app/models/repo.py +66 -0
- package/app/backend/app/models/review_comment.py +91 -0
- package/app/backend/app/models/review_summary.py +69 -0
- package/app/backend/app/models/revision.py +130 -0
- package/app/backend/app/models/ticket.py +223 -0
- package/app/backend/app/models/ticket_event.py +83 -0
- package/app/backend/app/models/user.py +47 -0
- package/app/backend/app/models/workspace.py +71 -0
- package/app/backend/app/redis_client.py +119 -0
- package/app/backend/app/routers/__init__.py +29 -0
- package/app/backend/app/routers/agents.py +296 -0
- package/app/backend/app/routers/auth.py +94 -0
- package/app/backend/app/routers/board.py +885 -0
- package/app/backend/app/routers/dashboard.py +351 -0
- package/app/backend/app/routers/debug.py +528 -0
- package/app/backend/app/routers/evidence.py +96 -0
- package/app/backend/app/routers/executors.py +324 -0
- package/app/backend/app/routers/goals.py +574 -0
- package/app/backend/app/routers/jobs.py +448 -0
- package/app/backend/app/routers/maintenance.py +172 -0
- package/app/backend/app/routers/merge.py +360 -0
- package/app/backend/app/routers/planner.py +537 -0
- package/app/backend/app/routers/pull_requests.py +382 -0
- package/app/backend/app/routers/repos.py +263 -0
- package/app/backend/app/routers/revisions.py +939 -0
- package/app/backend/app/routers/settings.py +267 -0
- package/app/backend/app/routers/tickets.py +2003 -0
- package/app/backend/app/routers/webhooks.py +143 -0
- package/app/backend/app/routers/websocket.py +249 -0
- package/app/backend/app/schemas/__init__.py +109 -0
- package/app/backend/app/schemas/board.py +87 -0
- package/app/backend/app/schemas/common.py +33 -0
- package/app/backend/app/schemas/evidence.py +87 -0
- package/app/backend/app/schemas/goal.py +90 -0
- package/app/backend/app/schemas/job.py +97 -0
- package/app/backend/app/schemas/merge.py +139 -0
- package/app/backend/app/schemas/planner.py +500 -0
- package/app/backend/app/schemas/repo.py +187 -0
- package/app/backend/app/schemas/review.py +137 -0
- package/app/backend/app/schemas/revision.py +114 -0
- package/app/backend/app/schemas/ticket.py +238 -0
- package/app/backend/app/schemas/ticket_event.py +72 -0
- package/app/backend/app/schemas/workspace.py +19 -0
- package/app/backend/app/services/__init__.py +31 -0
- package/app/backend/app/services/agent_memory_service.py +223 -0
- package/app/backend/app/services/agent_registry.py +346 -0
- package/app/backend/app/services/agent_session_manager.py +318 -0
- package/app/backend/app/services/agent_session_service.py +219 -0
- package/app/backend/app/services/agent_tools.py +379 -0
- package/app/backend/app/services/auth_service.py +98 -0
- package/app/backend/app/services/autonomy_service.py +380 -0
- package/app/backend/app/services/board_repo_service.py +201 -0
- package/app/backend/app/services/board_service.py +326 -0
- package/app/backend/app/services/cleanup_service.py +1085 -0
- package/app/backend/app/services/config_service.py +908 -0
- package/app/backend/app/services/context_gatherer.py +557 -0
- package/app/backend/app/services/cost_tracking_service.py +293 -0
- package/app/backend/app/services/cursor_log_normalizer.py +536 -0
- package/app/backend/app/services/delivery_pipeline.py +440 -0
- package/app/backend/app/services/executor_service.py +634 -0
- package/app/backend/app/services/git_host/__init__.py +11 -0
- package/app/backend/app/services/git_host/factory.py +87 -0
- package/app/backend/app/services/git_host/github.py +270 -0
- package/app/backend/app/services/git_host/gitlab.py +194 -0
- package/app/backend/app/services/git_host/protocol.py +75 -0
- package/app/backend/app/services/git_merge_simple.py +346 -0
- package/app/backend/app/services/git_ops.py +384 -0
- package/app/backend/app/services/github_service.py +233 -0
- package/app/backend/app/services/goal_service.py +113 -0
- package/app/backend/app/services/job_service.py +423 -0
- package/app/backend/app/services/job_watchdog_service.py +424 -0
- package/app/backend/app/services/langchain_adapter.py +122 -0
- package/app/backend/app/services/llm_provider_clients.py +351 -0
- package/app/backend/app/services/llm_service.py +285 -0
- package/app/backend/app/services/log_normalizer.py +342 -0
- package/app/backend/app/services/log_stream_service.py +276 -0
- package/app/backend/app/services/merge_checklist_service.py +264 -0
- package/app/backend/app/services/merge_service.py +784 -0
- package/app/backend/app/services/orchestrator_log.py +84 -0
- package/app/backend/app/services/planner_service.py +1662 -0
- package/app/backend/app/services/planner_tick_sync.py +1040 -0
- package/app/backend/app/services/queued_message_service.py +156 -0
- package/app/backend/app/services/reliability_wrapper.py +389 -0
- package/app/backend/app/services/repo_discovery_service.py +318 -0
- package/app/backend/app/services/review_service.py +334 -0
- package/app/backend/app/services/revision_service.py +389 -0
- package/app/backend/app/services/safe_autopilot.py +510 -0
- package/app/backend/app/services/sqlite_worker.py +372 -0
- package/app/backend/app/services/task_dispatch.py +135 -0
- package/app/backend/app/services/ticket_generation_service.py +1781 -0
- package/app/backend/app/services/ticket_service.py +486 -0
- package/app/backend/app/services/udar_planner_service.py +1007 -0
- package/app/backend/app/services/webhook_service.py +126 -0
- package/app/backend/app/services/workspace_service.py +465 -0
- package/app/backend/app/services/worktree_file_service.py +92 -0
- package/app/backend/app/services/worktree_validator.py +213 -0
- package/app/backend/app/sqlite_kv.py +278 -0
- package/app/backend/app/state_machine.py +128 -0
- package/app/backend/app/templates/__init__.py +5 -0
- package/app/backend/app/templates/registry.py +243 -0
- package/app/backend/app/utils/__init__.py +5 -0
- package/app/backend/app/utils/artifact_reader.py +87 -0
- package/app/backend/app/utils/circuit_breaker.py +229 -0
- package/app/backend/app/utils/db_retry.py +136 -0
- package/app/backend/app/utils/ignored_fields.py +123 -0
- package/app/backend/app/utils/validators.py +54 -0
- package/app/backend/app/websocket/__init__.py +5 -0
- package/app/backend/app/websocket/manager.py +179 -0
- package/app/backend/app/websocket/state_tracker.py +113 -0
- package/app/backend/app/worker.py +3190 -0
- package/app/backend/calculator_tickets.json +40 -0
- package/app/backend/canary_tests.sh +591 -0
- package/app/backend/celerybeat-schedule +0 -0
- package/app/backend/celerybeat-schedule-shm +0 -0
- package/app/backend/celerybeat-schedule-wal +0 -0
- package/app/backend/logs/.gitkeep +3 -0
- package/app/backend/multiplication_division_implementation_tickets.json +55 -0
- package/app/backend/multiplication_division_tickets.json +42 -0
- package/app/backend/pyproject.toml +45 -0
- package/app/backend/requirements-dev.txt +8 -0
- package/app/backend/requirements.txt +20 -0
- package/app/backend/run.sh +30 -0
- package/app/backend/run_with_logs.sh +10 -0
- package/app/backend/scientific_calculator_tickets.json +40 -0
- package/app/backend/scripts/extract_openapi.py +21 -0
- package/app/backend/scripts/seed_demo.py +187 -0
- package/app/backend/setup_demo_review.py +302 -0
- package/app/backend/test_actual_parse.py +41 -0
- package/app/backend/test_agent_streaming.py +61 -0
- package/app/backend/test_parse.py +51 -0
- package/app/backend/test_streaming.py +51 -0
- package/app/backend/test_subprocess_streaming.py +50 -0
- package/app/backend/tests/__init__.py +1 -0
- package/app/backend/tests/conftest.py +46 -0
- package/app/backend/tests/test_auth.py +341 -0
- package/app/backend/tests/test_autonomy_service.py +391 -0
- package/app/backend/tests/test_cleanup_service_safety.py +417 -0
- package/app/backend/tests/test_middleware.py +279 -0
- package/app/backend/tests/test_planner_providers.py +290 -0
- package/app/backend/tests/test_planner_unblock.py +183 -0
- package/app/backend/tests/test_revision_invariants.py +618 -0
- package/app/backend/tests/test_sqlite_kv.py +290 -0
- package/app/backend/tests/test_sqlite_worker.py +353 -0
- package/app/backend/tests/test_task_dispatch.py +100 -0
- package/app/backend/tests/test_ticket_validation.py +304 -0
- package/app/backend/tests/test_udar_agent.py +693 -0
- package/app/backend/tests/test_webhook_service.py +184 -0
- package/app/backend/tickets_output.json +59 -0
- package/app/backend/user_management_tickets.json +50 -0
- package/app/backend/uvicorn.log +0 -0
- package/app/draft.yaml +313 -0
- package/app/frontend/dist/assets/index-LcjCczu5.js +155 -0
- package/app/frontend/dist/assets/index-_FP_279e.css +1 -0
- package/app/frontend/dist/index.html +14 -0
- package/app/frontend/dist/vite.svg +1 -0
- package/app/frontend/package.json +101 -0
- package/bin/cli.js +527 -0
- package/package.json +37 -0
|
@@ -0,0 +1,908 @@
|
|
|
1
|
+
"""Service for reading and parsing draft.yaml configuration."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from enum import StrEnum
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
import yaml
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def deep_merge_dicts(base: dict, override: dict) -> dict:
    """Recursively merge *override* into *base* and return a new dict.

    Neither input is modified. When both sides hold a dict for the same
    key, the merge recurses; otherwise the override value wins.

    Args:
        base: Base dictionary
        override: Dictionary with override values

    Returns:
        Merged dictionary (new dict, doesn't modify inputs)

    Example:
        >>> base = {"a": 1, "b": {"c": 2}}
        >>> override = {"b": {"d": 3}}
        >>> deep_merge_dicts(base, override)
        {"a": 1, "b": {"c": 2, "d": 3}}
    """
    merged = dict(base)
    for key, incoming in override.items():
        current = merged.get(key)
        # Recurse only when both sides are dicts; any other pairing is a
        # plain replacement by the override value.
        if isinstance(current, dict) and isinstance(incoming, dict):
            merged[key] = deep_merge_dicts(current, incoming)
        else:
            merged[key] = incoming
    return merged
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class YoloStatus(StrEnum):
|
|
41
|
+
"""Result of YOLO mode check."""
|
|
42
|
+
|
|
43
|
+
DISABLED = "disabled" # yolo_mode: false
|
|
44
|
+
ALLOWED = "allowed" # yolo_mode: true AND repo in allowlist
|
|
45
|
+
REFUSED = "refused" # yolo_mode: true BUT allowlist empty or repo not in list
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@dataclass
class ProjectConfig:
    """Project-level configuration."""

    repo_root: str = "."  # Path to repo root (resolved to absolute at runtime)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "ProjectConfig":
        """Create a config instance from a dictionary."""
        return cls(
            repo_root=data.get("repo_root", "."),
        )

    def get_absolute_repo_root(self, config_dir: Path) -> Path:
        """Resolve repo_root to an absolute canonical path.

        A relative repo_root is interpreted relative to the directory
        containing the config file. Both relative and absolute inputs are
        passed through ``Path.resolve()`` so symlinks and ``..`` segments
        are normalized consistently (previously an absolute path was
        returned unnormalized, unlike the relative case).

        Args:
            config_dir: Directory containing the config file.

        Returns:
            Absolute, normalized path to the repo root.
        """
        root = Path(self.repo_root)
        if root.is_absolute():
            return root.resolve()
        return (config_dir / root).resolve()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@dataclass
class ExecuteConfig:
    """Configuration for execute jobs.

    YOLO Mode Safety:
    YOLO mode (--dangerously-skip-permissions) is ONLY allowed when:
    1. yolo_mode: true in config
    2. yolo_allowlist is NON-EMPTY
    3. The worktree path is in the allowlist

    If yolo_mode is true but allowlist is empty, execution REFUSES and
    transitions to needs_human. This prevents accidental YOLO.

    Default is yolo_mode: false (permissioned mode).
    """

    timeout: int = 600  # seconds (default 10 minutes)
    preferred_executor: str = "claude"  # "claude" (headless) or "cursor" (interactive)
    executor_model: str | None = None  # Optional model override for executor
    max_parallel_jobs: int = 1  # Max concurrent execute jobs (1 = sequential)
    yolo_mode: bool = False  # DANGEROUS: skip permissions prompts (opt-in only)
    yolo_allowlist: list[str] = field(
        default_factory=list
    )  # REQUIRED when yolo_mode=true

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "ExecuteConfig":
        """Create a config instance from a dictionary.

        Missing keys fall back to the field defaults; unknown keys are ignored.
        """
        return cls(
            timeout=data.get("timeout", 600),
            preferred_executor=data.get("preferred_executor", "claude"),
            executor_model=data.get("executor_model"),
            # Clamp to at least 1 so a zero/negative config value cannot
            # silently disable execution.
            max_parallel_jobs=max(1, data.get("max_parallel_jobs", 1)),
            yolo_mode=data.get("yolo_mode", False),
            # `or []` normalizes an explicit null (e.g. YAML `yolo_allowlist:`)
            # to an empty list.
            yolo_allowlist=data.get("yolo_allowlist") or [],
        )

    def check_yolo_status(
        self, worktree_path: str, repo_root: str | None = None
    ) -> YoloStatus:
        """Check YOLO mode status for a given worktree.

        Safety Policy:
        - If yolo_mode is False → DISABLED (use permissioned mode)
        - If yolo_mode is True but allowlist is empty → REFUSED (refuse to run)
        - If yolo_mode is True and repo_root in allowlist → ALLOWED
        - If yolo_mode is True but repo_root not in allowlist → REFUSED

        Path Matching:
        - All paths are resolved to absolute canonical paths (symlinks resolved)
        - Allowlist entries can be the repo root OR a parent directory
        - Worktree must be a descendant of an allowlisted path

        Args:
            worktree_path: Path to the worktree
            repo_root: Path to the main repo root (if different from worktree parent)

        Returns:
            YoloStatus indicating whether YOLO mode should be used
        """
        if not self.yolo_mode:
            return YoloStatus.DISABLED

        # CRITICAL: Empty allowlist + yolo_mode=true → REFUSE
        # This prevents "I turned on YOLO and forgot to set allowlist"
        if not self.yolo_allowlist:
            return YoloStatus.REFUSED

        # Resolve to canonical absolute paths (follows symlinks)
        # Use realpath for symlink resolution, then resolve for normalization
        worktree_canonical = os.path.realpath(worktree_path)

        # If repo_root is provided, use it; otherwise derive from worktree path
        # (worktrees are typically under {repo_root}/.draft/worktrees/)
        # NOTE(review): when repo_root is given, only it is checked against the
        # allowlist — the worktree path itself is not re-validated.
        if repo_root:
            check_path = os.path.realpath(repo_root)
        else:
            check_path = worktree_canonical

        # Check if the path (or repo root) is under any allowlisted path
        for allowed_path in self.yolo_allowlist:
            allowed_canonical = os.path.realpath(allowed_path)

            # Exact match
            if check_path == allowed_canonical:
                return YoloStatus.ALLOWED

            # Check if check_path is a descendant of allowed_canonical
            # Use os.path.commonpath to safely determine ancestry:
            # unlike str.startswith, it respects path-component boundaries
            # (so /repo-evil is NOT treated as being under /repo).
            try:
                common = os.path.commonpath([check_path, allowed_canonical])
                if common == allowed_canonical:
                    return YoloStatus.ALLOWED
            except ValueError:
                # Different drives on Windows, no common path
                continue

        return YoloStatus.REFUSED

    def get_yolo_refusal_reason(self, repo_root: str | None = None) -> str:
        """Get a human-readable reason for YOLO refusal.

        Distinguishes the two refusal causes: an empty allowlist vs. a repo
        that is simply not listed. The message includes canonicalized paths
        so the user can copy the exact path into yolo_allowlist.

        Args:
            repo_root: The repo root path to include in the message
        """
        if not self.yolo_allowlist:
            return (
                "YOLO mode enabled but yolo_allowlist is empty. "
                "For safety, you must explicitly list trusted repo paths in yolo_allowlist. "
                "Refusing to run with --dangerously-skip-permissions."
            )
        msg = "YOLO mode enabled but this repo is not in yolo_allowlist. "
        if repo_root:
            msg += f"Repo root: {os.path.realpath(repo_root)}. "
        msg += f"Allowlist: {[os.path.realpath(p) for p in self.yolo_allowlist]}. "
        msg += "Add this path to yolo_allowlist if you trust it."
        return msg
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
@dataclass
class VerifyConfig:
    """Configuration for verify jobs.

    Note: After verification passes, tickets always transition to 'needs_human'
    for user review. Only when the user approves the revision does it move to 'done'.
    The on_success field is kept for backwards compatibility but is ignored.
    """

    commands: list[str] = field(default_factory=list)
    on_success: str = "needs_human"  # DEPRECATED: Always "needs_human", kept for backwards compatibility
    on_failure: str = "blocked"  # "blocked" (only option for now)
    extra_allowed_commands: list[str] = field(default_factory=list)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "VerifyConfig":
        """Build a VerifyConfig from a raw mapping."""
        # `or []` collapses an explicit null to an empty list.
        verification_commands = data.get("commands") or []
        allowed_extras = data.get("extra_allowed_commands") or []
        return cls(
            commands=verification_commands,
            # Deliberately ignore any configured on_success: the user must
            # approve before a ticket moves to done.
            on_success="needs_human",
            on_failure=data.get("on_failure", "blocked"),
            extra_allowed_commands=allowed_extras,
        )
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
@dataclass
class CleanupConfig:
    """Configuration for cleanup policy.

    Controls automatic cleanup of worktrees and evidence files.
    """

    auto_cleanup_on_merge: bool = True  # Delete worktree after successful merge
    worktree_ttl_days: int = 14  # Delete worktrees older than this
    evidence_ttl_days: int = 30  # Delete evidence files older than this
    max_worktrees: int = 50  # Maximum number of active worktrees

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "CleanupConfig":
        """Build a CleanupConfig from a raw mapping, filling in defaults."""
        lookup = data.get
        return cls(
            auto_cleanup_on_merge=lookup("auto_cleanup_on_merge", True),
            worktree_ttl_days=lookup("worktree_ttl_days", 14),
            evidence_ttl_days=lookup("evidence_ttl_days", 30),
            max_worktrees=lookup("max_worktrees", 50),
        )
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
@dataclass
class MergeConfig:
    """Configuration for merge operations."""

    default_strategy: str = "merge"  # "merge" or "rebase"
    pull_before_merge: bool = True  # git pull --ff-only before merge
    delete_branch_after_merge: bool = True  # Delete branch after merge
    require_pull_success: bool = True  # If pull fails, abort merge (safer default)
    push_after_merge: bool = False  # Push target branch to remote after merge
    squash_merge: bool = True  # Squash commits into single commit
    check_divergence: bool = True  # Check if target branch moved ahead

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "MergeConfig":
        """Build a MergeConfig from a raw mapping, filling in defaults."""
        # Table of (key, default) pairs keeps the mapping in one place.
        presets = {
            "default_strategy": "merge",
            "pull_before_merge": True,
            "delete_branch_after_merge": True,
            "require_pull_success": True,
            "push_after_merge": False,
            "squash_merge": True,
            "check_divergence": True,
        }
        return cls(**{name: data.get(name, fallback) for name, fallback in presets.items()})
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
@dataclass
class AutonomyConfig:
    """Configuration for full autonomy mode safety rails."""

    max_diff_lines: int = 500  # Refuse auto-merge for diffs larger than this
    # Glob patterns for files that must never be auto-touched.
    sensitive_file_patterns: list[str] = field(
        default_factory=lambda: [
            "**/.env*",
            "**/*.pem",
            "**/*.key",
            "**/secrets/**",
            "**/credentials*",
        ]
    )
    require_verification_pass: bool = True

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "AutonomyConfig":
        """Create a config instance from a dictionary.

        The default sensitive-file patterns are taken from the dataclass
        field's own default_factory (via ``cls()``) so the list is defined
        in exactly one place instead of being duplicated here.
        """
        # `or` treats an explicit null/empty list as "use defaults", matching
        # the original behavior. cls() builds a fresh list each call, so the
        # returned config never aliases a shared default.
        patterns = data.get("sensitive_file_patterns") or cls().sensitive_file_patterns
        return cls(
            max_diff_lines=data.get("max_diff_lines", 500),
            sensitive_file_patterns=patterns,
            require_verification_pass=data.get("require_verification_pass", True),
        )
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@dataclass
class PlannerFeaturesConfig:
    """Feature flags for the planner."""

    auto_execute: bool = False
    propose_followups: bool = True
    generate_reflections: bool = True
    validate_tickets: bool = False

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "PlannerFeaturesConfig":
        """Build a flags instance from a raw mapping."""
        # Pull fallbacks from a default instance so they stay in sync
        # with the field declarations above.
        baseline = cls()
        return cls(
            auto_execute=data.get("auto_execute", baseline.auto_execute),
            propose_followups=data.get("propose_followups", baseline.propose_followups),
            generate_reflections=data.get(
                "generate_reflections", baseline.generate_reflections
            ),
            validate_tickets=data.get("validate_tickets", baseline.validate_tickets),
        )
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
@dataclass
class UDARConfig:
    """Configuration for UDAR (Understand-Decide-Act-Validate-Review) agent.

    UDAR is a lean agent architecture for adaptive ticket generation with
    minimal LLM usage (1-2 calls per goal).

    Phase 5 adds production hardening: error handling, timeouts, fallback behavior.
    """

    enabled: bool = False
    enable_incremental_replanning: bool = False
    max_self_correction_iterations: int = 1
    enable_llm_validation: bool = False

    # Incremental replanning settings (Phase 3)
    replan_batch_size: int = 5
    replan_significance_threshold: int = 10
    replan_max_frequency_minutes: int = 30

    # Production hardening settings (Phase 5)
    fallback_to_legacy: bool = True  # Fallback to legacy on UDAR errors
    timeout_seconds: int = 120  # Timeout for UDAR agent execution
    enable_cost_tracking: bool = True  # Track LLM costs in AgentSession
    max_retries_on_error: int = 0  # Retry UDAR on transient errors (0 = no retry)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "UDARConfig":
        """Build a UDARConfig from a raw mapping, filling in defaults."""
        read = data.get
        return cls(
            enabled=read("enabled", False),
            enable_incremental_replanning=read("enable_incremental_replanning", False),
            max_self_correction_iterations=read("max_self_correction_iterations", 1),
            enable_llm_validation=read("enable_llm_validation", False),
            # Phase 3: incremental replanning knobs
            replan_batch_size=read("replan_batch_size", 5),
            replan_significance_threshold=read("replan_significance_threshold", 10),
            replan_max_frequency_minutes=read("replan_max_frequency_minutes", 30),
            # Phase 5: production hardening knobs
            fallback_to_legacy=read("fallback_to_legacy", True),
            timeout_seconds=read("timeout_seconds", 120),
            enable_cost_tracking=read("enable_cost_tracking", True),
            max_retries_on_error=read("max_retries_on_error", 0),
        )
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
@dataclass
class PlannerConfig:
    """Configuration for the AI planner.

    The planner automates workflow decisions:
    - Picks next ticket to execute (deterministic)
    - Proposes follow-up tickets for blocked items (LLM)
    - Generates reflection summaries for done tickets (LLM)
    - Generates tickets from goals using agent CLI

    Safety caps prevent runaway follow-up generation:
    - max_followups_per_ticket: Max follow-ups for any single blocked ticket
    - max_followups_per_tick: Max follow-ups created in one tick
    - skip_followup_reasons: Blocker reasons that should NOT trigger follow-ups
    """

    model: str = "cli/claude"
    max_tokens_reflection: int = 300
    max_tokens_followup: int = 500
    timeout: int = 30
    features: PlannerFeaturesConfig = field(default_factory=PlannerFeaturesConfig)
    udar: UDARConfig = field(default_factory=UDARConfig)

    # Agent path for ticket generation (cursor-agent or claude CLI)
    # Auto-detected from PATH; set full path to override
    agent_path: str = "claude"

    # Follow-up caps to prevent spam
    max_followups_per_ticket: int = 2  # Total follow-ups for any blocked ticket
    max_followups_per_tick: int = 3  # Max follow-ups created in one tick

    # Blocker reasons that should NOT trigger follow-ups
    # These are typically prompt/requirements issues, not new tickets
    skip_followup_reasons: list[str] = field(
        default_factory=lambda: [
            "no changes produced",
            "no changes",
            "empty diff",
        ]
    )

    def get_agent_path(self) -> str:
        """Return agent_path with a leading ``~`` expanded to the user's home."""
        return os.path.expanduser(self.agent_path)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "PlannerConfig":
        """Create a config instance from a dictionary.

        A missing or null "features"/"udar" section falls back to the
        respective sub-config defaults (which equals parsing an empty
        mapping, so both cases go through from_dict uniformly). The
        default skip_followup_reasons list comes from the field's own
        default_factory (via ``cls()``) so it is defined in one place.
        """
        # `or {}` treats an explicit null section the same as an absent one.
        features = PlannerFeaturesConfig.from_dict(data.get("features") or {})
        udar = UDARConfig.from_dict(data.get("udar") or {})

        # Single source of truth for the default skip list: the field default.
        skip_reasons = data.get("skip_followup_reasons") or cls().skip_followup_reasons

        return cls(
            model=data.get("model", "cli/claude"),
            max_tokens_reflection=data.get("max_tokens_reflection", 300),
            max_tokens_followup=data.get("max_tokens_followup", 500),
            timeout=data.get("timeout", 30),
            features=features,
            udar=udar,
            agent_path=data.get("agent_path", "claude"),
            max_followups_per_ticket=data.get("max_followups_per_ticket", 2),
            max_followups_per_tick=data.get("max_followups_per_tick", 3),
            skip_followup_reasons=skip_reasons,
        )
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
@dataclass
|
|
441
|
+
class ExecutorProfile:
|
|
442
|
+
"""A named executor profile with configurable settings.
|
|
443
|
+
|
|
444
|
+
Profiles allow per-executor overrides in draft.yaml:
|
|
445
|
+
|
|
446
|
+
executor_profiles:
|
|
447
|
+
fast:
|
|
448
|
+
executor_type: claude
|
|
449
|
+
timeout: 300
|
|
450
|
+
extra_flags: ["--model", "claude-sonnet-4-5-20250929"]
|
|
451
|
+
thorough:
|
|
452
|
+
executor_type: claude
|
|
453
|
+
timeout: 1200
|
|
454
|
+
extra_flags: ["--model", "claude-opus-4-6"]
|
|
455
|
+
"""
|
|
456
|
+
|
|
457
|
+
name: str
|
|
458
|
+
executor_type: str = "claude"
|
|
459
|
+
timeout: int = 600
|
|
460
|
+
extra_flags: list[str] = field(default_factory=list)
|
|
461
|
+
model: str | None = None
|
|
462
|
+
env: dict[str, str] = field(default_factory=dict)
|
|
463
|
+
|
|
464
|
+
@classmethod
|
|
465
|
+
def from_dict(cls, name: str, data: dict[str, Any]) -> "ExecutorProfile":
|
|
466
|
+
"""Create a profile from a dictionary."""
|
|
467
|
+
return cls(
|
|
468
|
+
name=name,
|
|
469
|
+
executor_type=data.get("executor_type", "claude"),
|
|
470
|
+
timeout=data.get("timeout", 600),
|
|
471
|
+
extra_flags=data.get("extra_flags") or [],
|
|
472
|
+
model=data.get("model"),
|
|
473
|
+
env=data.get("env") or {},
|
|
474
|
+
)
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
def _dataclass_to_dict(obj: Any) -> Any:
|
|
478
|
+
"""Recursively convert a dataclass to a dict, handling nested dataclasses."""
|
|
479
|
+
from dataclasses import fields, is_dataclass
|
|
480
|
+
|
|
481
|
+
if is_dataclass(obj) and not isinstance(obj, type):
|
|
482
|
+
result = {}
|
|
483
|
+
for f in fields(obj):
|
|
484
|
+
value = getattr(obj, f.name)
|
|
485
|
+
result[f.name] = _dataclass_to_dict(value)
|
|
486
|
+
return result
|
|
487
|
+
elif isinstance(obj, dict):
|
|
488
|
+
return {k: _dataclass_to_dict(v) for k, v in obj.items()}
|
|
489
|
+
elif isinstance(obj, (list, tuple)):
|
|
490
|
+
return [_dataclass_to_dict(v) for v in obj]
|
|
491
|
+
return obj
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
@dataclass
|
|
495
|
+
class DraftConfig:
|
|
496
|
+
"""Root configuration for Draft.
|
|
497
|
+
|
|
498
|
+
Structure:
|
|
499
|
+
project:
|
|
500
|
+
repo_root: "."
|
|
501
|
+
|
|
502
|
+
execute_config:
|
|
503
|
+
timeout: 600
|
|
504
|
+
preferred_executor: "claude"
|
|
505
|
+
yolo_mode: false
|
|
506
|
+
yolo_allowlist: []
|
|
507
|
+
|
|
508
|
+
verify_config:
|
|
509
|
+
commands: [...]
|
|
510
|
+
on_success: "needs_human"
|
|
511
|
+
on_failure: "blocked"
|
|
512
|
+
|
|
513
|
+
planner_config:
|
|
514
|
+
model: "gpt-4o-mini"
|
|
515
|
+
max_tokens_reflection: 300
|
|
516
|
+
max_tokens_followup: 500
|
|
517
|
+
timeout: 30
|
|
518
|
+
features:
|
|
519
|
+
auto_execute: true
|
|
520
|
+
propose_followups: true
|
|
521
|
+
generate_reflections: true
|
|
522
|
+
|
|
523
|
+
cleanup_config:
|
|
524
|
+
auto_cleanup_on_merge: true
|
|
525
|
+
worktree_ttl_days: 14
|
|
526
|
+
evidence_ttl_days: 30
|
|
527
|
+
max_worktrees: 50
|
|
528
|
+
|
|
529
|
+
merge_config:
|
|
530
|
+
default_strategy: "merge"
|
|
531
|
+
pull_before_merge: true
|
|
532
|
+
delete_branch_after_merge: true
|
|
533
|
+
|
|
534
|
+
Legacy Support:
|
|
535
|
+
For backwards compatibility, also supports:
|
|
536
|
+
- verify_commands (top-level) → verify_config.commands
|
|
537
|
+
- auto_transition_on_success → verify_config.on_success
|
|
538
|
+
"""
|
|
539
|
+
|
|
540
|
+
project: ProjectConfig = field(default_factory=ProjectConfig)
|
|
541
|
+
execute_config: ExecuteConfig = field(default_factory=ExecuteConfig)
|
|
542
|
+
verify_config: VerifyConfig = field(default_factory=VerifyConfig)
|
|
543
|
+
planner_config: PlannerConfig = field(default_factory=PlannerConfig)
|
|
544
|
+
cleanup_config: CleanupConfig = field(default_factory=CleanupConfig)
|
|
545
|
+
merge_config: MergeConfig = field(default_factory=MergeConfig)
|
|
546
|
+
autonomy_config: AutonomyConfig = field(default_factory=AutonomyConfig)
|
|
547
|
+
executor_profiles: dict[str, ExecutorProfile] = field(default_factory=dict)
|
|
548
|
+
|
|
549
|
+
@classmethod
|
|
550
|
+
def from_dict(cls, data: dict[str, Any]) -> "DraftConfig":
|
|
551
|
+
"""Create a config instance from a dictionary."""
|
|
552
|
+
# Parse project config
|
|
553
|
+
project_data = data.get("project", {})
|
|
554
|
+
project = (
|
|
555
|
+
ProjectConfig.from_dict(project_data) if project_data else ProjectConfig()
|
|
556
|
+
)
|
|
557
|
+
|
|
558
|
+
# Parse execute config
|
|
559
|
+
execute_data = data.get("execute_config", {})
|
|
560
|
+
execute_config = (
|
|
561
|
+
ExecuteConfig.from_dict(execute_data) if execute_data else ExecuteConfig()
|
|
562
|
+
)
|
|
563
|
+
|
|
564
|
+
# Parse verify config (with legacy fallbacks)
|
|
565
|
+
verify_data = data.get("verify_config", {})
|
|
566
|
+
if verify_data:
|
|
567
|
+
verify_config = VerifyConfig.from_dict(verify_data)
|
|
568
|
+
else:
|
|
569
|
+
# Legacy support: top-level verify_commands and auto_transition_on_success
|
|
570
|
+
legacy_commands = data.get("verify_commands", [])
|
|
571
|
+
legacy_auto = data.get("auto_transition_on_success", False)
|
|
572
|
+
verify_config = VerifyConfig(
|
|
573
|
+
commands=legacy_commands,
|
|
574
|
+
on_success="done" if legacy_auto else "needs_human",
|
|
575
|
+
on_failure="blocked",
|
|
576
|
+
)
|
|
577
|
+
|
|
578
|
+
# Parse planner config
|
|
579
|
+
planner_data = data.get("planner_config", {})
|
|
580
|
+
planner_config = (
|
|
581
|
+
PlannerConfig.from_dict(planner_data) if planner_data else PlannerConfig()
|
|
582
|
+
)
|
|
583
|
+
|
|
584
|
+
# Parse cleanup config
|
|
585
|
+
cleanup_data = data.get("cleanup_config", {})
|
|
586
|
+
cleanup_config = (
|
|
587
|
+
CleanupConfig.from_dict(cleanup_data) if cleanup_data else CleanupConfig()
|
|
588
|
+
)
|
|
589
|
+
|
|
590
|
+
# Parse merge config
|
|
591
|
+
merge_data = data.get("merge_config", {})
|
|
592
|
+
merge_config = (
|
|
593
|
+
MergeConfig.from_dict(merge_data) if merge_data else MergeConfig()
|
|
594
|
+
)
|
|
595
|
+
|
|
596
|
+
# Parse autonomy config
|
|
597
|
+
autonomy_data = data.get("autonomy_config", {})
|
|
598
|
+
autonomy_config = (
|
|
599
|
+
AutonomyConfig.from_dict(autonomy_data)
|
|
600
|
+
if autonomy_data
|
|
601
|
+
else AutonomyConfig()
|
|
602
|
+
)
|
|
603
|
+
|
|
604
|
+
# Parse executor profiles
|
|
605
|
+
profiles_data = data.get("executor_profiles", {})
|
|
606
|
+
executor_profiles = {}
|
|
607
|
+
if isinstance(profiles_data, dict):
|
|
608
|
+
for profile_name, profile_data in profiles_data.items():
|
|
609
|
+
if isinstance(profile_data, dict):
|
|
610
|
+
executor_profiles[profile_name] = ExecutorProfile.from_dict(
|
|
611
|
+
profile_name, profile_data
|
|
612
|
+
)
|
|
613
|
+
|
|
614
|
+
return cls(
|
|
615
|
+
project=project,
|
|
616
|
+
execute_config=execute_config,
|
|
617
|
+
verify_config=verify_config,
|
|
618
|
+
planner_config=planner_config,
|
|
619
|
+
cleanup_config=cleanup_config,
|
|
620
|
+
merge_config=merge_config,
|
|
621
|
+
autonomy_config=autonomy_config,
|
|
622
|
+
executor_profiles=executor_profiles,
|
|
623
|
+
)
|
|
624
|
+
|
|
625
|
+
def to_dict(self) -> dict[str, Any]:
|
|
626
|
+
"""Convert the config to a plain dict suitable for JSON storage.
|
|
627
|
+
|
|
628
|
+
Handles nested dataclasses (ExecutorProfile, PlannerConfig, etc.)
|
|
629
|
+
by recursively converting them.
|
|
630
|
+
"""
|
|
631
|
+
return _dataclass_to_dict(self)
|
|
632
|
+
|
|
633
|
+
@classmethod
|
|
634
|
+
def from_board_config(cls, board_config: dict[str, Any] | None) -> "DraftConfig":
|
|
635
|
+
"""Create a DraftConfig from a board's config dict.
|
|
636
|
+
|
|
637
|
+
This is the primary way to load config at runtime - directly from
|
|
638
|
+
the board's DB-stored config, without reading any YAML file.
|
|
639
|
+
|
|
640
|
+
Args:
|
|
641
|
+
board_config: The board.config JSON dict, or None for defaults.
|
|
642
|
+
|
|
643
|
+
Returns:
|
|
644
|
+
DraftConfig with all sections populated.
|
|
645
|
+
"""
|
|
646
|
+
if not board_config:
|
|
647
|
+
return cls()
|
|
648
|
+
return cls.from_dict(board_config)
|
|
649
|
+
|
|
650
|
+
# Convenience properties for backwards compatibility
|
|
651
|
+
@property
|
|
652
|
+
def verify_commands(self) -> list[str]:
|
|
653
|
+
"""Get verification commands (legacy accessor)."""
|
|
654
|
+
return self.verify_config.commands
|
|
655
|
+
|
|
656
|
+
@property
|
|
657
|
+
def auto_transition_on_success(self) -> bool:
|
|
658
|
+
"""Get auto-transition setting (legacy accessor).
|
|
659
|
+
|
|
660
|
+
DEPRECATED: Always returns False. Tickets must be approved by user
|
|
661
|
+
to transition from needs_human to done.
|
|
662
|
+
"""
|
|
663
|
+
return False
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
class ConfigService:
|
|
667
|
+
"""Service for reading and parsing draft.yaml configuration."""
|
|
668
|
+
|
|
669
|
+
CONFIG_FILENAME = "draft.yaml"
|
|
670
|
+
_cache: dict[str, DraftConfig] = {}
|
|
671
|
+
|
|
672
|
+
def __init__(self, repo_path: Path | str | None = None):
|
|
673
|
+
"""
|
|
674
|
+
Initialize the config service.
|
|
675
|
+
|
|
676
|
+
Args:
|
|
677
|
+
repo_path: Path to the git repository root.
|
|
678
|
+
If None, uses GIT_REPO_PATH env var or current directory.
|
|
679
|
+
"""
|
|
680
|
+
if repo_path is None:
|
|
681
|
+
repo_path = os.environ.get("GIT_REPO_PATH", ".")
|
|
682
|
+
self.repo_path = Path(repo_path)
|
|
683
|
+
|
|
684
|
+
# If config file not found at repo_path, try to find it by walking up
|
|
685
|
+
# to the git repo root (handles CWD being a subdirectory like backend/)
|
|
686
|
+
if not (self.repo_path / self.CONFIG_FILENAME).exists():
|
|
687
|
+
try:
|
|
688
|
+
import subprocess
|
|
689
|
+
|
|
690
|
+
git_root = subprocess.run(
|
|
691
|
+
["git", "rev-parse", "--show-toplevel"],
|
|
692
|
+
capture_output=True,
|
|
693
|
+
text=True,
|
|
694
|
+
timeout=5,
|
|
695
|
+
cwd=str(self.repo_path),
|
|
696
|
+
).stdout.strip()
|
|
697
|
+
if git_root and (Path(git_root) / self.CONFIG_FILENAME).exists():
|
|
698
|
+
self.repo_path = Path(git_root)
|
|
699
|
+
except Exception:
|
|
700
|
+
pass
|
|
701
|
+
|
|
702
|
+
@property
|
|
703
|
+
def config_path(self) -> Path:
|
|
704
|
+
"""Get the path to the config file."""
|
|
705
|
+
return self.repo_path / self.CONFIG_FILENAME
|
|
706
|
+
|
|
707
|
+
def load_config(self, use_cache: bool = False) -> DraftConfig:
|
|
708
|
+
"""
|
|
709
|
+
Load and parse the draft.yaml configuration.
|
|
710
|
+
|
|
711
|
+
Args:
|
|
712
|
+
use_cache: Whether to use cached config if available (default: False for dev).
|
|
713
|
+
|
|
714
|
+
Returns:
|
|
715
|
+
DraftConfig instance with parsed configuration.
|
|
716
|
+
Returns default config if file doesn't exist or is invalid.
|
|
717
|
+
"""
|
|
718
|
+
return self._load_config_from_file()
|
|
719
|
+
|
|
720
|
+
def _load_config_from_file(self) -> DraftConfig:
|
|
721
|
+
"""Load config from file, returning defaults if not found or invalid."""
|
|
722
|
+
if not self.config_path.exists():
|
|
723
|
+
return DraftConfig()
|
|
724
|
+
|
|
725
|
+
try:
|
|
726
|
+
with open(self.config_path) as f:
|
|
727
|
+
data = yaml.safe_load(f)
|
|
728
|
+
|
|
729
|
+
if data is None:
|
|
730
|
+
return DraftConfig()
|
|
731
|
+
|
|
732
|
+
if not isinstance(data, dict):
|
|
733
|
+
logger.warning(
|
|
734
|
+
"Config file %s has invalid format (expected dict, got %s); "
|
|
735
|
+
"using default configuration",
|
|
736
|
+
self.config_path,
|
|
737
|
+
type(data).__name__,
|
|
738
|
+
)
|
|
739
|
+
return DraftConfig()
|
|
740
|
+
|
|
741
|
+
return DraftConfig.from_dict(data)
|
|
742
|
+
|
|
743
|
+
except yaml.YAMLError as e:
|
|
744
|
+
logger.warning(
|
|
745
|
+
"Failed to parse config file %s: %s; using default configuration",
|
|
746
|
+
self.config_path,
|
|
747
|
+
e,
|
|
748
|
+
)
|
|
749
|
+
return DraftConfig()
|
|
750
|
+
except OSError as e:
|
|
751
|
+
logger.warning(
|
|
752
|
+
"Failed to read config file %s: %s; using default configuration",
|
|
753
|
+
self.config_path,
|
|
754
|
+
e,
|
|
755
|
+
)
|
|
756
|
+
return DraftConfig()
|
|
757
|
+
|
|
758
|
+
def load_config_with_board_overrides(
|
|
759
|
+
self,
|
|
760
|
+
board_config: dict[str, Any] | None = None,
|
|
761
|
+
use_cache: bool = False,
|
|
762
|
+
) -> DraftConfig:
|
|
763
|
+
"""Load config from file and apply board-level overrides.
|
|
764
|
+
|
|
765
|
+
Args:
|
|
766
|
+
board_config: Optional dict of board-level config overrides.
|
|
767
|
+
Keys match draft.yaml sections (e.g. execute_config, planner_config).
|
|
768
|
+
use_cache: Whether to use cached config.
|
|
769
|
+
|
|
770
|
+
Returns:
|
|
771
|
+
DraftConfig with board overrides merged in.
|
|
772
|
+
"""
|
|
773
|
+
config = self.load_config(use_cache=use_cache)
|
|
774
|
+
|
|
775
|
+
if not board_config:
|
|
776
|
+
return config
|
|
777
|
+
|
|
778
|
+
# Merge board overrides into the loaded config
|
|
779
|
+
if "execute_config" in board_config and isinstance(
|
|
780
|
+
board_config["execute_config"], dict
|
|
781
|
+
):
|
|
782
|
+
ec = board_config["execute_config"]
|
|
783
|
+
if "timeout" in ec:
|
|
784
|
+
config.execute_config.timeout = ec["timeout"]
|
|
785
|
+
if "preferred_executor" in ec:
|
|
786
|
+
config.execute_config.preferred_executor = ec["preferred_executor"]
|
|
787
|
+
if "yolo_mode" in ec:
|
|
788
|
+
config.execute_config.yolo_mode = ec["yolo_mode"]
|
|
789
|
+
|
|
790
|
+
if "planner_config" in board_config and isinstance(
|
|
791
|
+
board_config["planner_config"], dict
|
|
792
|
+
):
|
|
793
|
+
pc = board_config["planner_config"]
|
|
794
|
+
if "model" in pc:
|
|
795
|
+
config.planner_config.model = pc["model"]
|
|
796
|
+
if "agent_path" in pc:
|
|
797
|
+
config.planner_config.agent_path = pc["agent_path"]
|
|
798
|
+
if "timeout" in pc:
|
|
799
|
+
config.planner_config.timeout = pc["timeout"]
|
|
800
|
+
|
|
801
|
+
if "verify_config" in board_config and isinstance(
|
|
802
|
+
board_config["verify_config"], dict
|
|
803
|
+
):
|
|
804
|
+
vc = board_config["verify_config"]
|
|
805
|
+
if "commands" in vc:
|
|
806
|
+
config.verify_config.commands = vc["commands"]
|
|
807
|
+
|
|
808
|
+
return config
|
|
809
|
+
|
|
810
|
+
def clear_cache(self) -> None:
|
|
811
|
+
"""Clear the configuration cache."""
|
|
812
|
+
self._cache.clear()
|
|
813
|
+
|
|
814
|
+
# Convenience methods
|
|
815
|
+
def get_verify_commands(self) -> list[str]:
|
|
816
|
+
"""Get the list of verification commands."""
|
|
817
|
+
return self.load_config().verify_commands
|
|
818
|
+
|
|
819
|
+
def get_verify_on_success(self) -> str:
|
|
820
|
+
"""Get the target state when verification succeeds.
|
|
821
|
+
|
|
822
|
+
Always returns 'needs_human' - user must approve to move to done.
|
|
823
|
+
"""
|
|
824
|
+
return "needs_human"
|
|
825
|
+
|
|
826
|
+
def get_execute_config(self) -> ExecuteConfig:
|
|
827
|
+
"""Get the execute configuration."""
|
|
828
|
+
return self.load_config().execute_config
|
|
829
|
+
|
|
830
|
+
def get_execute_timeout(self) -> int:
|
|
831
|
+
"""Get the execute job timeout in seconds."""
|
|
832
|
+
return self.load_config().execute_config.timeout
|
|
833
|
+
|
|
834
|
+
def get_preferred_executor(self) -> str:
|
|
835
|
+
"""Get the preferred executor CLI (cursor or claude)."""
|
|
836
|
+
return self.load_config().execute_config.preferred_executor
|
|
837
|
+
|
|
838
|
+
def get_repo_root(self) -> Path:
|
|
839
|
+
"""Get the absolute repo root path."""
|
|
840
|
+
config = self.load_config()
|
|
841
|
+
return config.project.get_absolute_repo_root(self.repo_path)
|
|
842
|
+
|
|
843
|
+
def get_planner_config(self) -> PlannerConfig:
|
|
844
|
+
"""Get the planner configuration."""
|
|
845
|
+
return self.load_config().planner_config
|
|
846
|
+
|
|
847
|
+
def get_cleanup_config(self) -> CleanupConfig:
|
|
848
|
+
"""Get the cleanup configuration."""
|
|
849
|
+
return self.load_config().cleanup_config
|
|
850
|
+
|
|
851
|
+
def get_merge_config(self) -> MergeConfig:
|
|
852
|
+
"""Get the merge configuration."""
|
|
853
|
+
return self.load_config().merge_config
|
|
854
|
+
|
|
855
|
+
def get_autonomy_config(self) -> AutonomyConfig:
|
|
856
|
+
"""Get the autonomy configuration."""
|
|
857
|
+
return self.load_config().autonomy_config
|
|
858
|
+
|
|
859
|
+
def get_executor_profiles(self) -> dict[str, ExecutorProfile]:
|
|
860
|
+
"""Get all configured executor profiles."""
|
|
861
|
+
return self.load_config().executor_profiles
|
|
862
|
+
|
|
863
|
+
def get_executor_profile(self, name: str) -> ExecutorProfile | None:
|
|
864
|
+
"""Get a specific executor profile by name."""
|
|
865
|
+
return self.load_config().executor_profiles.get(name)
|
|
866
|
+
|
|
867
|
+
def save_executor_profiles(
|
|
868
|
+
self, profiles: list[dict[str, Any]]
|
|
869
|
+
) -> dict[str, ExecutorProfile]:
|
|
870
|
+
"""Save executor profiles to draft.yaml.
|
|
871
|
+
|
|
872
|
+
Reads the existing YAML, updates only the executor_profiles section,
|
|
873
|
+
and writes back. Preserves all other config and comments where possible.
|
|
874
|
+
"""
|
|
875
|
+
config_path = self.config_path
|
|
876
|
+
|
|
877
|
+
# Load existing YAML as raw dict (preserves structure)
|
|
878
|
+
data: dict[str, Any] = {}
|
|
879
|
+
if config_path.exists():
|
|
880
|
+
with open(config_path) as f:
|
|
881
|
+
data = yaml.safe_load(f) or {}
|
|
882
|
+
|
|
883
|
+
# Build profiles dict
|
|
884
|
+
profiles_dict: dict[str, Any] = {}
|
|
885
|
+
for p in profiles:
|
|
886
|
+
name = p.get("name", "").strip()
|
|
887
|
+
if not name:
|
|
888
|
+
continue
|
|
889
|
+
entry: dict[str, Any] = {}
|
|
890
|
+
if p.get("executor_type"):
|
|
891
|
+
entry["executor_type"] = p["executor_type"]
|
|
892
|
+
if p.get("timeout"):
|
|
893
|
+
entry["timeout"] = int(p["timeout"])
|
|
894
|
+
if p.get("extra_flags"):
|
|
895
|
+
entry["extra_flags"] = p["extra_flags"]
|
|
896
|
+
if p.get("model"):
|
|
897
|
+
entry["model"] = p["model"]
|
|
898
|
+
if p.get("env"):
|
|
899
|
+
entry["env"] = p["env"]
|
|
900
|
+
profiles_dict[name] = entry
|
|
901
|
+
|
|
902
|
+
data["executor_profiles"] = profiles_dict
|
|
903
|
+
|
|
904
|
+
with open(config_path, "w") as f:
|
|
905
|
+
yaml.dump(data, f, default_flow_style=False, sort_keys=False)
|
|
906
|
+
|
|
907
|
+
self.clear_cache()
|
|
908
|
+
return self.get_executor_profiles()
|