draft-board 0.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app/backend/.env.example +9 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_patch.txt +195 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_stat.txt +6 -0
- package/app/backend/CURL_EXAMPLES.md +335 -0
- package/app/backend/ENV_SETUP.md +65 -0
- package/app/backend/alembic/env.py +71 -0
- package/app/backend/alembic/script.py.mako +28 -0
- package/app/backend/alembic/versions/001_initial_schema.py +104 -0
- package/app/backend/alembic/versions/002_add_jobs_table.py +52 -0
- package/app/backend/alembic/versions/003_add_workspace_table.py +48 -0
- package/app/backend/alembic/versions/004_add_evidence_table.py +56 -0
- package/app/backend/alembic/versions/005_add_verification_commands.py +32 -0
- package/app/backend/alembic/versions/006_add_planner_lock_table.py +39 -0
- package/app/backend/alembic/versions/007_add_revision_review_tables.py +126 -0
- package/app/backend/alembic/versions/008_add_revision_idempotency_and_traceability.py +52 -0
- package/app/backend/alembic/versions/009_add_job_health_fields.py +46 -0
- package/app/backend/alembic/versions/010_add_review_comment_line_content.py +36 -0
- package/app/backend/alembic/versions/011_add_analysis_cache.py +47 -0
- package/app/backend/alembic/versions/012_add_boards_table.py +102 -0
- package/app/backend/alembic/versions/013_add_ticket_blocking.py +45 -0
- package/app/backend/alembic/versions/014_add_agent_sessions.py +220 -0
- package/app/backend/alembic/versions/015_add_ticket_sort_order.py +33 -0
- package/app/backend/alembic/versions/03220f0b93ae_add_pr_fields_to_ticket.py +49 -0
- package/app/backend/alembic/versions/0c2d89fff3b1_seed_board_configs_from_yaml.py +206 -0
- package/app/backend/alembic/versions/3348e5cf54c1_add_merge_checklist_table.py +67 -0
- package/app/backend/alembic/versions/357c780ee445_add_goal_status.py +34 -0
- package/app/backend/alembic/versions/553340b7e26c_add_autonomy_fields_to_goal.py +65 -0
- package/app/backend/alembic/versions/774dc335c679_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/7b307e847cbd_merge_heads.py +23 -0
- package/app/backend/alembic/versions/82ecd978cc70_add_missing_indexes.py +48 -0
- package/app/backend/alembic/versions/8ef5054dc280_add_normalized_log_entries.py +173 -0
- package/app/backend/alembic/versions/8f3e2bd8ea3b_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/9d17f0698d3b_add_config_column_to_boards_table.py +30 -0
- package/app/backend/alembic/versions/add_agent_conversation_history.py +72 -0
- package/app/backend/alembic/versions/add_job_variant.py +34 -0
- package/app/backend/alembic/versions/add_performance_indexes.py +95 -0
- package/app/backend/alembic/versions/add_repos_and_board_repos.py +174 -0
- package/app/backend/alembic/versions/add_session_id_to_jobs.py +27 -0
- package/app/backend/alembic/versions/add_sqlite_backend_tables.py +104 -0
- package/app/backend/alembic/versions/b10fb0b62240_add_diff_content_to_revisions.py +34 -0
- package/app/backend/alembic.ini +89 -0
- package/app/backend/app/__init__.py +3 -0
- package/app/backend/app/data_dir.py +85 -0
- package/app/backend/app/database.py +70 -0
- package/app/backend/app/database_sync.py +64 -0
- package/app/backend/app/dependencies/__init__.py +5 -0
- package/app/backend/app/dependencies/auth.py +80 -0
- package/app/backend/app/dependencies.py +43 -0
- package/app/backend/app/exceptions.py +178 -0
- package/app/backend/app/executors/__init__.py +1 -0
- package/app/backend/app/executors/adapters/__init__.py +1 -0
- package/app/backend/app/executors/adapters/aider.py +152 -0
- package/app/backend/app/executors/adapters/amazon_q.py +103 -0
- package/app/backend/app/executors/adapters/amp.py +123 -0
- package/app/backend/app/executors/adapters/claude.py +177 -0
- package/app/backend/app/executors/adapters/cline.py +127 -0
- package/app/backend/app/executors/adapters/codex.py +167 -0
- package/app/backend/app/executors/adapters/copilot.py +202 -0
- package/app/backend/app/executors/adapters/cursor.py +87 -0
- package/app/backend/app/executors/adapters/droid.py +123 -0
- package/app/backend/app/executors/adapters/gemini.py +132 -0
- package/app/backend/app/executors/adapters/goose.py +131 -0
- package/app/backend/app/executors/adapters/opencode.py +123 -0
- package/app/backend/app/executors/adapters/qwen.py +123 -0
- package/app/backend/app/executors/plugins/__init__.py +1 -0
- package/app/backend/app/executors/registry.py +202 -0
- package/app/backend/app/executors/spec.py +226 -0
- package/app/backend/app/main.py +486 -0
- package/app/backend/app/middleware/__init__.py +13 -0
- package/app/backend/app/middleware/idempotency.py +426 -0
- package/app/backend/app/middleware/rate_limit.py +312 -0
- package/app/backend/app/middleware/security_headers.py +43 -0
- package/app/backend/app/middleware/timeout.py +37 -0
- package/app/backend/app/models/__init__.py +56 -0
- package/app/backend/app/models/agent_conversation_history.py +56 -0
- package/app/backend/app/models/agent_session.py +127 -0
- package/app/backend/app/models/analysis_cache.py +49 -0
- package/app/backend/app/models/base.py +9 -0
- package/app/backend/app/models/board.py +79 -0
- package/app/backend/app/models/board_repo.py +68 -0
- package/app/backend/app/models/cost_budget.py +42 -0
- package/app/backend/app/models/enums.py +40 -0
- package/app/backend/app/models/evidence.py +132 -0
- package/app/backend/app/models/goal.py +102 -0
- package/app/backend/app/models/idempotency_entry.py +30 -0
- package/app/backend/app/models/job.py +163 -0
- package/app/backend/app/models/job_queue.py +39 -0
- package/app/backend/app/models/kv_store.py +28 -0
- package/app/backend/app/models/merge_checklist.py +87 -0
- package/app/backend/app/models/normalized_log.py +100 -0
- package/app/backend/app/models/planner_lock.py +43 -0
- package/app/backend/app/models/rate_limit_entry.py +25 -0
- package/app/backend/app/models/repo.py +66 -0
- package/app/backend/app/models/review_comment.py +91 -0
- package/app/backend/app/models/review_summary.py +69 -0
- package/app/backend/app/models/revision.py +130 -0
- package/app/backend/app/models/ticket.py +223 -0
- package/app/backend/app/models/ticket_event.py +83 -0
- package/app/backend/app/models/user.py +47 -0
- package/app/backend/app/models/workspace.py +71 -0
- package/app/backend/app/redis_client.py +119 -0
- package/app/backend/app/routers/__init__.py +29 -0
- package/app/backend/app/routers/agents.py +296 -0
- package/app/backend/app/routers/auth.py +94 -0
- package/app/backend/app/routers/board.py +885 -0
- package/app/backend/app/routers/dashboard.py +351 -0
- package/app/backend/app/routers/debug.py +528 -0
- package/app/backend/app/routers/evidence.py +96 -0
- package/app/backend/app/routers/executors.py +324 -0
- package/app/backend/app/routers/goals.py +574 -0
- package/app/backend/app/routers/jobs.py +448 -0
- package/app/backend/app/routers/maintenance.py +172 -0
- package/app/backend/app/routers/merge.py +360 -0
- package/app/backend/app/routers/planner.py +537 -0
- package/app/backend/app/routers/pull_requests.py +382 -0
- package/app/backend/app/routers/repos.py +263 -0
- package/app/backend/app/routers/revisions.py +939 -0
- package/app/backend/app/routers/settings.py +267 -0
- package/app/backend/app/routers/tickets.py +2003 -0
- package/app/backend/app/routers/webhooks.py +143 -0
- package/app/backend/app/routers/websocket.py +249 -0
- package/app/backend/app/schemas/__init__.py +109 -0
- package/app/backend/app/schemas/board.py +87 -0
- package/app/backend/app/schemas/common.py +33 -0
- package/app/backend/app/schemas/evidence.py +87 -0
- package/app/backend/app/schemas/goal.py +90 -0
- package/app/backend/app/schemas/job.py +97 -0
- package/app/backend/app/schemas/merge.py +139 -0
- package/app/backend/app/schemas/planner.py +500 -0
- package/app/backend/app/schemas/repo.py +187 -0
- package/app/backend/app/schemas/review.py +137 -0
- package/app/backend/app/schemas/revision.py +114 -0
- package/app/backend/app/schemas/ticket.py +238 -0
- package/app/backend/app/schemas/ticket_event.py +72 -0
- package/app/backend/app/schemas/workspace.py +19 -0
- package/app/backend/app/services/__init__.py +31 -0
- package/app/backend/app/services/agent_memory_service.py +223 -0
- package/app/backend/app/services/agent_registry.py +346 -0
- package/app/backend/app/services/agent_session_manager.py +318 -0
- package/app/backend/app/services/agent_session_service.py +219 -0
- package/app/backend/app/services/agent_tools.py +379 -0
- package/app/backend/app/services/auth_service.py +98 -0
- package/app/backend/app/services/autonomy_service.py +380 -0
- package/app/backend/app/services/board_repo_service.py +201 -0
- package/app/backend/app/services/board_service.py +326 -0
- package/app/backend/app/services/cleanup_service.py +1085 -0
- package/app/backend/app/services/config_service.py +908 -0
- package/app/backend/app/services/context_gatherer.py +557 -0
- package/app/backend/app/services/cost_tracking_service.py +293 -0
- package/app/backend/app/services/cursor_log_normalizer.py +536 -0
- package/app/backend/app/services/delivery_pipeline.py +440 -0
- package/app/backend/app/services/executor_service.py +634 -0
- package/app/backend/app/services/git_host/__init__.py +11 -0
- package/app/backend/app/services/git_host/factory.py +87 -0
- package/app/backend/app/services/git_host/github.py +270 -0
- package/app/backend/app/services/git_host/gitlab.py +194 -0
- package/app/backend/app/services/git_host/protocol.py +75 -0
- package/app/backend/app/services/git_merge_simple.py +346 -0
- package/app/backend/app/services/git_ops.py +384 -0
- package/app/backend/app/services/github_service.py +233 -0
- package/app/backend/app/services/goal_service.py +113 -0
- package/app/backend/app/services/job_service.py +423 -0
- package/app/backend/app/services/job_watchdog_service.py +424 -0
- package/app/backend/app/services/langchain_adapter.py +122 -0
- package/app/backend/app/services/llm_provider_clients.py +351 -0
- package/app/backend/app/services/llm_service.py +285 -0
- package/app/backend/app/services/log_normalizer.py +342 -0
- package/app/backend/app/services/log_stream_service.py +276 -0
- package/app/backend/app/services/merge_checklist_service.py +264 -0
- package/app/backend/app/services/merge_service.py +784 -0
- package/app/backend/app/services/orchestrator_log.py +84 -0
- package/app/backend/app/services/planner_service.py +1662 -0
- package/app/backend/app/services/planner_tick_sync.py +1040 -0
- package/app/backend/app/services/queued_message_service.py +156 -0
- package/app/backend/app/services/reliability_wrapper.py +389 -0
- package/app/backend/app/services/repo_discovery_service.py +318 -0
- package/app/backend/app/services/review_service.py +334 -0
- package/app/backend/app/services/revision_service.py +389 -0
- package/app/backend/app/services/safe_autopilot.py +510 -0
- package/app/backend/app/services/sqlite_worker.py +372 -0
- package/app/backend/app/services/task_dispatch.py +135 -0
- package/app/backend/app/services/ticket_generation_service.py +1781 -0
- package/app/backend/app/services/ticket_service.py +486 -0
- package/app/backend/app/services/udar_planner_service.py +1007 -0
- package/app/backend/app/services/webhook_service.py +126 -0
- package/app/backend/app/services/workspace_service.py +465 -0
- package/app/backend/app/services/worktree_file_service.py +92 -0
- package/app/backend/app/services/worktree_validator.py +213 -0
- package/app/backend/app/sqlite_kv.py +278 -0
- package/app/backend/app/state_machine.py +128 -0
- package/app/backend/app/templates/__init__.py +5 -0
- package/app/backend/app/templates/registry.py +243 -0
- package/app/backend/app/utils/__init__.py +5 -0
- package/app/backend/app/utils/artifact_reader.py +87 -0
- package/app/backend/app/utils/circuit_breaker.py +229 -0
- package/app/backend/app/utils/db_retry.py +136 -0
- package/app/backend/app/utils/ignored_fields.py +123 -0
- package/app/backend/app/utils/validators.py +54 -0
- package/app/backend/app/websocket/__init__.py +5 -0
- package/app/backend/app/websocket/manager.py +179 -0
- package/app/backend/app/websocket/state_tracker.py +113 -0
- package/app/backend/app/worker.py +3190 -0
- package/app/backend/calculator_tickets.json +40 -0
- package/app/backend/canary_tests.sh +591 -0
- package/app/backend/celerybeat-schedule +0 -0
- package/app/backend/celerybeat-schedule-shm +0 -0
- package/app/backend/celerybeat-schedule-wal +0 -0
- package/app/backend/logs/.gitkeep +3 -0
- package/app/backend/multiplication_division_implementation_tickets.json +55 -0
- package/app/backend/multiplication_division_tickets.json +42 -0
- package/app/backend/pyproject.toml +45 -0
- package/app/backend/requirements-dev.txt +8 -0
- package/app/backend/requirements.txt +20 -0
- package/app/backend/run.sh +30 -0
- package/app/backend/run_with_logs.sh +10 -0
- package/app/backend/scientific_calculator_tickets.json +40 -0
- package/app/backend/scripts/extract_openapi.py +21 -0
- package/app/backend/scripts/seed_demo.py +187 -0
- package/app/backend/setup_demo_review.py +302 -0
- package/app/backend/test_actual_parse.py +41 -0
- package/app/backend/test_agent_streaming.py +61 -0
- package/app/backend/test_parse.py +51 -0
- package/app/backend/test_streaming.py +51 -0
- package/app/backend/test_subprocess_streaming.py +50 -0
- package/app/backend/tests/__init__.py +1 -0
- package/app/backend/tests/conftest.py +46 -0
- package/app/backend/tests/test_auth.py +341 -0
- package/app/backend/tests/test_autonomy_service.py +391 -0
- package/app/backend/tests/test_cleanup_service_safety.py +417 -0
- package/app/backend/tests/test_middleware.py +279 -0
- package/app/backend/tests/test_planner_providers.py +290 -0
- package/app/backend/tests/test_planner_unblock.py +183 -0
- package/app/backend/tests/test_revision_invariants.py +618 -0
- package/app/backend/tests/test_sqlite_kv.py +290 -0
- package/app/backend/tests/test_sqlite_worker.py +353 -0
- package/app/backend/tests/test_task_dispatch.py +100 -0
- package/app/backend/tests/test_ticket_validation.py +304 -0
- package/app/backend/tests/test_udar_agent.py +693 -0
- package/app/backend/tests/test_webhook_service.py +184 -0
- package/app/backend/tickets_output.json +59 -0
- package/app/backend/user_management_tickets.json +50 -0
- package/app/backend/uvicorn.log +0 -0
- package/app/draft.yaml +313 -0
- package/app/frontend/dist/assets/index-LcjCczu5.js +155 -0
- package/app/frontend/dist/assets/index-_FP_279e.css +1 -0
- package/app/frontend/dist/index.html +14 -0
- package/app/frontend/dist/vite.svg +1 -0
- package/app/frontend/package.json +101 -0
- package/bin/cli.js +527 -0
- package/package.json +37 -0
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
"""In-process job runner backed by SQLite job_queue table.
|
|
2
|
+
|
|
3
|
+
In-process job runner using a ThreadPoolExecutor(max_workers=1).
|
|
4
|
+
Also runs periodic tasks (watchdog, planner tick, PR polling).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import logging
|
|
9
|
+
import sqlite3
|
|
10
|
+
import threading
|
|
11
|
+
import time
|
|
12
|
+
import uuid
|
|
13
|
+
from collections.abc import Callable
|
|
14
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
15
|
+
|
|
16
|
+
from app.sqlite_kv import _DB_PATH
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
# Worker identity for claiming tasks
|
|
21
|
+
_WORKER_ID = f"sqlite-worker-{uuid.uuid4().hex[:8]}"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class SQLiteWorker:
    """In-process job runner backed by SQLite job_queue table."""

    def __init__(self, poll_interval: float = 0.5, max_workers: int = 1):
        # poll_interval: seconds to wait between queue polls when idle.
        # max_workers: job thread-pool size; >1 enables parallel job execution.
        self.poll_interval = poll_interval
        self.max_workers = max_workers
        self._executor = ThreadPoolExecutor(max_workers=max_workers)
        # Dedicated single-thread pool so periodic tasks are never starved
        # by long-running jobs occupying the main executor.
        self._periodic_executor = ThreadPoolExecutor(
            max_workers=1, thread_name_prefix="sk-periodic"
        )
        self._tasks: dict[str, Callable] = {}  # task name -> callable
        self._periodic_tasks: list[tuple[str, float, Callable]] = []  # (name, interval, func)
        self._running = False
        self._poll_thread: threading.Thread | None = None
        self._scheduler_thread: threading.Thread | None = None
        # Signals both loops to exit; also used as an interruptible sleep.
        self._stop_event = threading.Event()

    def register_task(self, name: str, func: Callable) -> None:
        """Register a task function by name."""
        self._tasks[name] = func
        logger.debug(f"Registered task: {name}")

    def register_periodic(self, name: str, interval: float, func: Callable) -> None:
        """Register a periodic task (replaces Celery Beat).

        Args:
            name: Label used for scheduling bookkeeping and logging.
            interval: Minimum number of seconds between runs.
            func: Zero-argument callable to invoke on each run.
        """
        self._periodic_tasks.append((name, interval, func))
        logger.debug(f"Registered periodic task: {name} (every {interval}s)")

    def start(self) -> None:
        """Start the worker daemon threads."""
        # Idempotent: calling start() twice must not spawn duplicate threads.
        if self._running:
            return

        self._running = True
        self._stop_event.clear()

        self._poll_thread = threading.Thread(
            target=self._poll_loop, daemon=True, name="sqlite-worker-poll"
        )
        self._poll_thread.start()

        # The scheduler thread only exists when there is something to schedule.
        if self._periodic_tasks:
            self._scheduler_thread = threading.Thread(
                target=self._scheduler_loop, daemon=True, name="sqlite-worker-scheduler"
            )
            self._scheduler_thread.start()

        logger.info(
            f"SQLite worker started (id={_WORKER_ID}, "
            f"tasks={list(self._tasks.keys())}, "
            f"periodic={[t[0] for t in self._periodic_tasks]})"
        )

    def stop(self) -> None:
        """Gracefully stop the worker."""
        if not self._running:
            return

        logger.info("Stopping SQLite worker...")
        self._running = False
        self._stop_event.set()

        # Bounded joins so shutdown cannot hang forever on a stuck loop.
        if self._poll_thread:
            self._poll_thread.join(timeout=5)
        if self._scheduler_thread:
            self._scheduler_thread.join(timeout=5)

        # wait=True lets in-flight tasks finish; already-queued futures
        # are not cancelled (cancel_futures=False).
        self._executor.shutdown(wait=True, cancel_futures=False)
        self._periodic_executor.shutdown(wait=True, cancel_futures=False)
        logger.info("SQLite worker stopped")

    def _get_conn(self) -> sqlite3.Connection:
        """Get a SQLite connection with WAL mode."""
        conn = sqlite3.connect(_DB_PATH, timeout=30)
        # WAL allows concurrent readers while this worker writes.
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA busy_timeout=30000")
        return conn

    def _poll_loop(self) -> None:
        """Main polling loop: claim and execute pending tasks.

        When max_workers > 1, the loop continues claiming tasks without sleeping
        until no more pending tasks are available, enabling parallel execution.
        """
        while self._running and not self._stop_event.is_set():
            try:
                task = self._claim_next_task()
                if task:
                    task_id, task_name, args_json = task
                    args = json.loads(args_json)

                    func = self._tasks.get(task_name)
                    if func:
                        self._executor.submit(
                            self._execute_task, task_id, task_name, func, args
                        )
                        # When parallel enabled, immediately try claiming more
                        if self.max_workers > 1:
                            continue
                    else:
                        # The row is already marked 'claimed'; fail it so it
                        # does not sit in the queue forever.
                        logger.error(f"Unknown task: {task_name} (id={task_id})")
                        self._mark_failed(task_id, f"Unknown task: {task_name}")
                else:
                    # No pending tasks, sleep
                    self._stop_event.wait(timeout=self.poll_interval)

            except Exception as e:
                # Keep the loop alive on unexpected errors (e.g. transient
                # SQLite failures); brief backoff before retrying.
                logger.error(f"Poll loop error: {e}", exc_info=True)
                self._stop_event.wait(timeout=1.0)

    def _claim_next_task(self) -> tuple[str, str, str] | None:
        """Atomically claim the next pending task.

        Uses UPDATE...RETURNING with a subquery for a single atomic operation
        (no race window between SELECT and UPDATE).

        NOTE(review): RETURNING requires SQLite >= 3.35 — confirm the minimum
        supported SQLite version for deployments.

        Returns (task_id, task_name, args_json) or None.
        """
        conn = self._get_conn()
        try:
            row = conn.execute(
                "UPDATE job_queue SET status = 'claimed', claimed_by = ?, "
                "claimed_at = datetime('now') "
                "WHERE id = ("
                " SELECT id FROM job_queue WHERE status = 'pending' "
                " ORDER BY priority DESC, created_at ASC LIMIT 1"
                ") "
                "RETURNING id, task_name, args_json",
                (_WORKER_ID,),
            ).fetchone()
            conn.commit()
            return (row[0], row[1], row[2]) if row else None
        finally:
            conn.close()

    def _execute_task(
        self, task_id: str, task_name: str, func: Callable, args: list
    ) -> None:
        """Execute a claimed task and update its status.

        Runs on the job executor thread pool; any exception from the task
        marks the row failed rather than propagating.
        """
        logger.info(f"Executing task {task_name} (id={task_id})")
        try:
            result = func(*args)
            self._mark_completed(task_id, result)
            logger.info(f"Task {task_name} completed (id={task_id})")
        except Exception as e:
            logger.error(f"Task {task_name} failed (id={task_id}): {e}", exc_info=True)
            self._mark_failed(task_id, str(e))

    def _mark_completed(self, task_id: str, result) -> None:
        """Mark a task as completed with its result."""
        conn = self._get_conn()
        try:
            conn.execute(
                "UPDATE job_queue SET status = 'completed', "
                "completed_at = datetime('now'), result_json = ? "
                "WHERE id = ?",
                # NULL result_json when the task returned None.
                (json.dumps(result) if result is not None else None, task_id),
            )
            conn.commit()
        finally:
            conn.close()

    def _mark_failed(self, task_id: str, error: str) -> None:
        """Mark a task as failed."""
        conn = self._get_conn()
        try:
            conn.execute(
                "UPDATE job_queue SET status = 'failed', "
                "completed_at = datetime('now'), result_json = ? "
                "WHERE id = ?",
                # Truncate to keep oversized tracebacks out of the DB row.
                (json.dumps({"error": error[:1000]}), task_id),
            )
            conn.commit()
        finally:
            conn.close()

    def _scheduler_loop(self) -> None:
        """Run periodic tasks at their specified intervals.

        Periodic tasks run on a separate single-thread executor so they are
        never starved by long-running job tasks on the main executor.
        """
        # name -> monotonic timestamp of the last scheduled run
        last_run: dict[str, float] = {}

        while self._running and not self._stop_event.is_set():
            now = time.monotonic()

            for name, interval, func in self._periodic_tasks:
                last = last_run.get(name, 0)
                if now - last >= interval:
                    # Record the schedule time *before* submitting so a slow
                    # task is not re-submitted on every scheduler tick.
                    last_run[name] = now
                    try:
                        self._periodic_executor.submit(self._run_periodic, name, func)
                    except Exception as e:
                        logger.error(f"Failed to schedule periodic task {name}: {e}")

            # 0.5s scheduler resolution; wakes early when stop() is called.
            self._stop_event.wait(timeout=0.5)

    def _run_periodic(self, name: str, func: Callable) -> None:
        """Execute a periodic task."""
        try:
            func()
        except Exception as e:
            # Periodic failures are logged, never propagated, so one bad tick
            # cannot kill the scheduler executor thread.
            logger.error(f"Periodic task {name} failed: {e}", exc_info=True)
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
# Global worker instance (created lazily)
|
|
230
|
+
_worker: SQLiteWorker | None = None
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def get_worker() -> SQLiteWorker:
    """Return the process-wide SQLite worker, creating it on first use.

    The thread-pool size comes from ``max_parallel_jobs`` in draft.yaml;
    when the config cannot be loaded, a single-threaded worker is used.
    """
    global _worker
    if _worker is not None:
        return _worker

    pool_size = 1
    try:
        from app.services.config_service import ConfigService

        loaded = ConfigService().load_config()
        pool_size = loaded.execute_config.max_parallel_jobs
    except Exception:
        pass  # config unavailable -> keep single-threaded default

    _worker = SQLiteWorker(max_workers=pool_size)
    if pool_size > 1:
        logger.info(f"Parallel execution enabled: max_parallel_jobs={pool_size}")
    return _worker
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
def setup_worker() -> SQLiteWorker:
    """Set up the SQLite worker with all registered tasks.

    Called during FastAPI lifespan.

    Registers the three job task handlers (execute/verify/resume) plus the
    periodic tasks that replace Celery Beat, then returns the worker
    (not yet started).
    """
    worker = get_worker()

    # Register task functions (import here to avoid circular imports)
    from app.worker import (
        _execute_ticket_task_impl,
        _resume_ticket_task_impl,
        _verify_ticket_task_impl,
    )

    def execute_ticket_wrapper(job_id: str) -> dict:
        """Wrapper that sets up streaming context for execute_ticket."""
        from app.models.job import JobStatus
        from app.state_machine import TicketState
        from app.worker import (
            get_job_with_ticket,
            set_current_job,
            stream_finished,
            transition_ticket_sync,
            update_job_finished,
        )

        # Bind the job id so log streaming can attribute output to this job.
        set_current_job(job_id)
        try:
            return _execute_ticket_task_impl(job_id)
        except Exception as e:
            logger.error(f"execute_ticket crashed for job {job_id}: {e}", exc_info=True)
            # Best-effort recovery: each step is wrapped so a failure in one
            # (e.g. DB unavailable) does not prevent the others.
            try:
                update_job_finished(job_id, JobStatus.FAILED, exit_code=1)
            except Exception:
                pass
            try:
                result = get_job_with_ticket(job_id)
                if result:
                    _, ticket = result
                    # Move the ticket to BLOCKED so the planner does not
                    # keep scheduling a crashing ticket.
                    transition_ticket_sync(
                        ticket.id,
                        TicketState.BLOCKED,
                        reason=f"Execution crashed: {e}",
                        actor_id="execute_worker",
                    )
            except Exception:
                pass
            return {"job_id": job_id, "status": "failed", "error": str(e)}
        finally:
            # Always close the log stream and clear the job binding,
            # even on success.
            stream_finished(job_id)
            set_current_job(None)

    def verify_ticket_wrapper(job_id: str) -> dict:
        """Wrapper for verify_ticket."""
        from app.models.job import JobStatus
        from app.state_machine import TicketState
        from app.worker import (
            get_job_with_ticket,
            transition_ticket_sync,
            update_job_finished,
        )

        try:
            return _verify_ticket_task_impl(job_id)
        except Exception as e:
            logger.error(f"verify_ticket crashed for job {job_id}: {e}", exc_info=True)
            # Same best-effort crash handling as execute_ticket_wrapper,
            # minus the streaming context (verify has none).
            try:
                update_job_finished(job_id, JobStatus.FAILED, exit_code=1)
            except Exception:
                pass
            try:
                result = get_job_with_ticket(job_id)
                if result:
                    _, ticket = result
                    transition_ticket_sync(
                        ticket.id,
                        TicketState.BLOCKED,
                        reason=f"Verification crashed: {e}",
                        actor_id="verify_worker",
                    )
            except Exception:
                pass
            return {"job_id": job_id, "status": "failed", "error": str(e)}

    def resume_ticket_wrapper(job_id: str) -> dict:
        """Wrapper for resume_ticket."""
        # No crash handling here: the impl is called directly — NOTE(review):
        # confirm whether resume should get the same BLOCKED-on-crash handling.
        return _resume_ticket_task_impl(job_id)

    worker.register_task("execute_ticket", execute_ticket_wrapper)
    worker.register_task("verify_ticket", verify_ticket_wrapper)
    worker.register_task("resume_ticket", resume_ticket_wrapper)

    # Register periodic tasks (replaces Celery Beat)
    def run_job_watchdog():
        # Local import shadows this wrapper's name inside the body;
        # the imported function is the one that is called.
        from app.services.job_watchdog_service import run_job_watchdog

        run_job_watchdog()

    def run_planner_tick():
        from app.services.planner_tick_sync import (
            PlannerLockError,
            run_planner_tick_sync,
        )

        try:
            run_planner_tick_sync()
        except PlannerLockError:
            pass  # Another tick in progress

    def run_poll_pr_statuses():
        from app.worker import poll_pr_statuses

        poll_pr_statuses()

    # Intervals are seconds between runs (see SQLiteWorker.register_periodic).
    worker.register_periodic("job_watchdog", 15.0, run_job_watchdog)
    worker.register_periodic("planner_tick", 2.0, run_planner_tick)
    worker.register_periodic("poll_pr_statuses", 300.0, run_poll_pr_statuses)

    return worker
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
"""Task dispatch via SQLite job_queue table.
|
|
2
|
+
|
|
3
|
+
Enqueues background tasks for the in-process SQLiteWorker to pick up.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import time
|
|
9
|
+
import uuid
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TaskHandle:
    """Lightweight handle for an enqueued background task.

    Exposes an ``id`` attribute (the queue row's UUID string) so callers
    can track the task the same way they track richer job objects.
    """

    def __init__(self, task_id: str):
        self.id: str = task_id
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _check_budget_sync(job_id: str) -> None:
    """Check if the goal's budget allows execution (sync, best-effort).

    Queries the goal's CostBudget and total spend to warn/block if over budget.
    Only applies to execute tasks. Logs a warning if over budget but does not
    block (to avoid breaking existing workflows).
    """
    # The whole check is wrapped in a broad try/except on purpose: a failed
    # budget lookup must never prevent a job from being enqueued.
    try:
        # Imports are deferred — presumably to avoid circular imports at
        # module load time; TODO confirm.
        from app.database_sync import get_sync_db
        from app.models.job import Job

        with get_sync_db() as db:
            job = db.query(Job).filter(Job.id == job_id).first()
            if not job or not job.ticket_id:
                return  # nothing to check without a ticket-backed job

            from app.models.ticket import Ticket

            ticket = db.query(Ticket).filter(Ticket.id == job.ticket_id).first()
            if not ticket or not ticket.goal_id:
                return  # budgets are tracked per goal

            from app.models.cost_budget import CostBudget

            budget = (
                db.query(CostBudget)
                .filter(CostBudget.goal_id == ticket.goal_id)
                .first()
            )
            if not budget or budget.total_budget is None:
                return  # no budget configured for this goal

            from sqlalchemy import func

            from app.models.agent_session import AgentSession

            # Sum estimated cost across all agent sessions joined to tickets
            # under the same goal; coalesce(..., 0) covers the no-rows case.
            total_spent = (
                db.query(func.coalesce(func.sum(AgentSession.estimated_cost_usd), 0))
                .join(Ticket)
                .filter(Ticket.goal_id == ticket.goal_id)
                .scalar()
            )
            total_spent = float(total_spent or 0)

            if total_spent >= budget.total_budget:
                # Over budget: warn only — the job still proceeds.
                logger.warning(
                    f"Budget exceeded for goal {ticket.goal_id}: "
                    f"spent=${total_spent:.2f} >= budget=${budget.total_budget:.2f}. "
                    f"Job {job_id} will proceed but may incur overage."
                )
            elif (
                budget.warning_threshold
                and total_spent >= budget.total_budget * budget.warning_threshold
            ):
                # Approaching budget: warning_threshold appears to be a
                # fraction of total_budget (e.g. 0.8 = 80%) — TODO confirm.
                logger.warning(
                    f"Budget warning for goal {ticket.goal_id}: "
                    f"spent=${total_spent:.2f} / budget=${budget.total_budget:.2f} "
                    f"({total_spent / budget.total_budget * 100:.0f}% used)"
                )
    except Exception as e:
        # Best-effort by design: missing tables, DB offline, etc. are ignored.
        logger.debug(f"Budget check skipped for job {job_id}: {e}")
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def enqueue_task(task_name: str, args: list | None = None) -> TaskHandle:
    """Insert a pending task row into the SQLite job_queue table.

    The in-process SQLiteWorker later claims and executes the row.
    Retries with linear backoff when the database is locked.

    Args:
        task_name: The task name (e.g., "execute_ticket", "verify_ticket")
        args: Positional arguments for the task

    Returns:
        TaskHandle with .id attribute (task ID string)
    """
    import sqlite3

    from app.sqlite_kv import _DB_PATH

    payload = args or []
    new_id = str(uuid.uuid4())

    # Execute tasks get a best-effort budget check before they are queued.
    if task_name == "execute_ticket" and payload:
        _check_budget_sync(payload[0])

    attempts = 5
    for retry in range(attempts):
        db = sqlite3.connect(_DB_PATH, timeout=30)
        try:
            db.execute("PRAGMA journal_mode=WAL")
            db.execute("PRAGMA busy_timeout=30000")
            db.execute(
                "INSERT INTO job_queue (id, task_name, args_json, status, priority, created_at) "
                "VALUES (?, ?, ?, 'pending', 0, datetime('now'))",
                (new_id, task_name, json.dumps(payload)),
            )
            db.commit()
            logger.info(f"Enqueued task {new_id} ({task_name})")
            return TaskHandle(new_id)
        except sqlite3.OperationalError as e:
            retriable = "locked" in str(e) and retry < attempts - 1
            if not retriable:
                raise
            logger.warning(
                f"SQLite locked on enqueue attempt {retry + 1}/{attempts}, "
                f"retrying in {0.5 * (retry + 1)}s..."
            )
            time.sleep(0.5 * (retry + 1))
        finally:
            db.close()

    # Should not reach here, but just in case
    raise sqlite3.OperationalError("database is locked after all retries")
|