draft-board 0.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app/backend/.env.example +9 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_patch.txt +195 -0
- package/app/backend/.smartkanban/evidence/8b383839-cbec-45af-86ee-c7708d075cbe/bddf2ed5-2e21-4d46-a62b-10b87f1642a6_stat.txt +6 -0
- package/app/backend/CURL_EXAMPLES.md +335 -0
- package/app/backend/ENV_SETUP.md +65 -0
- package/app/backend/alembic/env.py +71 -0
- package/app/backend/alembic/script.py.mako +28 -0
- package/app/backend/alembic/versions/001_initial_schema.py +104 -0
- package/app/backend/alembic/versions/002_add_jobs_table.py +52 -0
- package/app/backend/alembic/versions/003_add_workspace_table.py +48 -0
- package/app/backend/alembic/versions/004_add_evidence_table.py +56 -0
- package/app/backend/alembic/versions/005_add_verification_commands.py +32 -0
- package/app/backend/alembic/versions/006_add_planner_lock_table.py +39 -0
- package/app/backend/alembic/versions/007_add_revision_review_tables.py +126 -0
- package/app/backend/alembic/versions/008_add_revision_idempotency_and_traceability.py +52 -0
- package/app/backend/alembic/versions/009_add_job_health_fields.py +46 -0
- package/app/backend/alembic/versions/010_add_review_comment_line_content.py +36 -0
- package/app/backend/alembic/versions/011_add_analysis_cache.py +47 -0
- package/app/backend/alembic/versions/012_add_boards_table.py +102 -0
- package/app/backend/alembic/versions/013_add_ticket_blocking.py +45 -0
- package/app/backend/alembic/versions/014_add_agent_sessions.py +220 -0
- package/app/backend/alembic/versions/015_add_ticket_sort_order.py +33 -0
- package/app/backend/alembic/versions/03220f0b93ae_add_pr_fields_to_ticket.py +49 -0
- package/app/backend/alembic/versions/0c2d89fff3b1_seed_board_configs_from_yaml.py +206 -0
- package/app/backend/alembic/versions/3348e5cf54c1_add_merge_checklist_table.py +67 -0
- package/app/backend/alembic/versions/357c780ee445_add_goal_status.py +34 -0
- package/app/backend/alembic/versions/553340b7e26c_add_autonomy_fields_to_goal.py +65 -0
- package/app/backend/alembic/versions/774dc335c679_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/7b307e847cbd_merge_heads.py +23 -0
- package/app/backend/alembic/versions/82ecd978cc70_add_missing_indexes.py +48 -0
- package/app/backend/alembic/versions/8ef5054dc280_add_normalized_log_entries.py +173 -0
- package/app/backend/alembic/versions/8f3e2bd8ea3b_merge_migration_heads.py +23 -0
- package/app/backend/alembic/versions/9d17f0698d3b_add_config_column_to_boards_table.py +30 -0
- package/app/backend/alembic/versions/add_agent_conversation_history.py +72 -0
- package/app/backend/alembic/versions/add_job_variant.py +34 -0
- package/app/backend/alembic/versions/add_performance_indexes.py +95 -0
- package/app/backend/alembic/versions/add_repos_and_board_repos.py +174 -0
- package/app/backend/alembic/versions/add_session_id_to_jobs.py +27 -0
- package/app/backend/alembic/versions/add_sqlite_backend_tables.py +104 -0
- package/app/backend/alembic/versions/b10fb0b62240_add_diff_content_to_revisions.py +34 -0
- package/app/backend/alembic.ini +89 -0
- package/app/backend/app/__init__.py +3 -0
- package/app/backend/app/data_dir.py +85 -0
- package/app/backend/app/database.py +70 -0
- package/app/backend/app/database_sync.py +64 -0
- package/app/backend/app/dependencies/__init__.py +5 -0
- package/app/backend/app/dependencies/auth.py +80 -0
- package/app/backend/app/dependencies.py +43 -0
- package/app/backend/app/exceptions.py +178 -0
- package/app/backend/app/executors/__init__.py +1 -0
- package/app/backend/app/executors/adapters/__init__.py +1 -0
- package/app/backend/app/executors/adapters/aider.py +152 -0
- package/app/backend/app/executors/adapters/amazon_q.py +103 -0
- package/app/backend/app/executors/adapters/amp.py +123 -0
- package/app/backend/app/executors/adapters/claude.py +177 -0
- package/app/backend/app/executors/adapters/cline.py +127 -0
- package/app/backend/app/executors/adapters/codex.py +167 -0
- package/app/backend/app/executors/adapters/copilot.py +202 -0
- package/app/backend/app/executors/adapters/cursor.py +87 -0
- package/app/backend/app/executors/adapters/droid.py +123 -0
- package/app/backend/app/executors/adapters/gemini.py +132 -0
- package/app/backend/app/executors/adapters/goose.py +131 -0
- package/app/backend/app/executors/adapters/opencode.py +123 -0
- package/app/backend/app/executors/adapters/qwen.py +123 -0
- package/app/backend/app/executors/plugins/__init__.py +1 -0
- package/app/backend/app/executors/registry.py +202 -0
- package/app/backend/app/executors/spec.py +226 -0
- package/app/backend/app/main.py +486 -0
- package/app/backend/app/middleware/__init__.py +13 -0
- package/app/backend/app/middleware/idempotency.py +426 -0
- package/app/backend/app/middleware/rate_limit.py +312 -0
- package/app/backend/app/middleware/security_headers.py +43 -0
- package/app/backend/app/middleware/timeout.py +37 -0
- package/app/backend/app/models/__init__.py +56 -0
- package/app/backend/app/models/agent_conversation_history.py +56 -0
- package/app/backend/app/models/agent_session.py +127 -0
- package/app/backend/app/models/analysis_cache.py +49 -0
- package/app/backend/app/models/base.py +9 -0
- package/app/backend/app/models/board.py +79 -0
- package/app/backend/app/models/board_repo.py +68 -0
- package/app/backend/app/models/cost_budget.py +42 -0
- package/app/backend/app/models/enums.py +40 -0
- package/app/backend/app/models/evidence.py +132 -0
- package/app/backend/app/models/goal.py +102 -0
- package/app/backend/app/models/idempotency_entry.py +30 -0
- package/app/backend/app/models/job.py +163 -0
- package/app/backend/app/models/job_queue.py +39 -0
- package/app/backend/app/models/kv_store.py +28 -0
- package/app/backend/app/models/merge_checklist.py +87 -0
- package/app/backend/app/models/normalized_log.py +100 -0
- package/app/backend/app/models/planner_lock.py +43 -0
- package/app/backend/app/models/rate_limit_entry.py +25 -0
- package/app/backend/app/models/repo.py +66 -0
- package/app/backend/app/models/review_comment.py +91 -0
- package/app/backend/app/models/review_summary.py +69 -0
- package/app/backend/app/models/revision.py +130 -0
- package/app/backend/app/models/ticket.py +223 -0
- package/app/backend/app/models/ticket_event.py +83 -0
- package/app/backend/app/models/user.py +47 -0
- package/app/backend/app/models/workspace.py +71 -0
- package/app/backend/app/redis_client.py +119 -0
- package/app/backend/app/routers/__init__.py +29 -0
- package/app/backend/app/routers/agents.py +296 -0
- package/app/backend/app/routers/auth.py +94 -0
- package/app/backend/app/routers/board.py +885 -0
- package/app/backend/app/routers/dashboard.py +351 -0
- package/app/backend/app/routers/debug.py +528 -0
- package/app/backend/app/routers/evidence.py +96 -0
- package/app/backend/app/routers/executors.py +324 -0
- package/app/backend/app/routers/goals.py +574 -0
- package/app/backend/app/routers/jobs.py +448 -0
- package/app/backend/app/routers/maintenance.py +172 -0
- package/app/backend/app/routers/merge.py +360 -0
- package/app/backend/app/routers/planner.py +537 -0
- package/app/backend/app/routers/pull_requests.py +382 -0
- package/app/backend/app/routers/repos.py +263 -0
- package/app/backend/app/routers/revisions.py +939 -0
- package/app/backend/app/routers/settings.py +267 -0
- package/app/backend/app/routers/tickets.py +2003 -0
- package/app/backend/app/routers/webhooks.py +143 -0
- package/app/backend/app/routers/websocket.py +249 -0
- package/app/backend/app/schemas/__init__.py +109 -0
- package/app/backend/app/schemas/board.py +87 -0
- package/app/backend/app/schemas/common.py +33 -0
- package/app/backend/app/schemas/evidence.py +87 -0
- package/app/backend/app/schemas/goal.py +90 -0
- package/app/backend/app/schemas/job.py +97 -0
- package/app/backend/app/schemas/merge.py +139 -0
- package/app/backend/app/schemas/planner.py +500 -0
- package/app/backend/app/schemas/repo.py +187 -0
- package/app/backend/app/schemas/review.py +137 -0
- package/app/backend/app/schemas/revision.py +114 -0
- package/app/backend/app/schemas/ticket.py +238 -0
- package/app/backend/app/schemas/ticket_event.py +72 -0
- package/app/backend/app/schemas/workspace.py +19 -0
- package/app/backend/app/services/__init__.py +31 -0
- package/app/backend/app/services/agent_memory_service.py +223 -0
- package/app/backend/app/services/agent_registry.py +346 -0
- package/app/backend/app/services/agent_session_manager.py +318 -0
- package/app/backend/app/services/agent_session_service.py +219 -0
- package/app/backend/app/services/agent_tools.py +379 -0
- package/app/backend/app/services/auth_service.py +98 -0
- package/app/backend/app/services/autonomy_service.py +380 -0
- package/app/backend/app/services/board_repo_service.py +201 -0
- package/app/backend/app/services/board_service.py +326 -0
- package/app/backend/app/services/cleanup_service.py +1085 -0
- package/app/backend/app/services/config_service.py +908 -0
- package/app/backend/app/services/context_gatherer.py +557 -0
- package/app/backend/app/services/cost_tracking_service.py +293 -0
- package/app/backend/app/services/cursor_log_normalizer.py +536 -0
- package/app/backend/app/services/delivery_pipeline.py +440 -0
- package/app/backend/app/services/executor_service.py +634 -0
- package/app/backend/app/services/git_host/__init__.py +11 -0
- package/app/backend/app/services/git_host/factory.py +87 -0
- package/app/backend/app/services/git_host/github.py +270 -0
- package/app/backend/app/services/git_host/gitlab.py +194 -0
- package/app/backend/app/services/git_host/protocol.py +75 -0
- package/app/backend/app/services/git_merge_simple.py +346 -0
- package/app/backend/app/services/git_ops.py +384 -0
- package/app/backend/app/services/github_service.py +233 -0
- package/app/backend/app/services/goal_service.py +113 -0
- package/app/backend/app/services/job_service.py +423 -0
- package/app/backend/app/services/job_watchdog_service.py +424 -0
- package/app/backend/app/services/langchain_adapter.py +122 -0
- package/app/backend/app/services/llm_provider_clients.py +351 -0
- package/app/backend/app/services/llm_service.py +285 -0
- package/app/backend/app/services/log_normalizer.py +342 -0
- package/app/backend/app/services/log_stream_service.py +276 -0
- package/app/backend/app/services/merge_checklist_service.py +264 -0
- package/app/backend/app/services/merge_service.py +784 -0
- package/app/backend/app/services/orchestrator_log.py +84 -0
- package/app/backend/app/services/planner_service.py +1662 -0
- package/app/backend/app/services/planner_tick_sync.py +1040 -0
- package/app/backend/app/services/queued_message_service.py +156 -0
- package/app/backend/app/services/reliability_wrapper.py +389 -0
- package/app/backend/app/services/repo_discovery_service.py +318 -0
- package/app/backend/app/services/review_service.py +334 -0
- package/app/backend/app/services/revision_service.py +389 -0
- package/app/backend/app/services/safe_autopilot.py +510 -0
- package/app/backend/app/services/sqlite_worker.py +372 -0
- package/app/backend/app/services/task_dispatch.py +135 -0
- package/app/backend/app/services/ticket_generation_service.py +1781 -0
- package/app/backend/app/services/ticket_service.py +486 -0
- package/app/backend/app/services/udar_planner_service.py +1007 -0
- package/app/backend/app/services/webhook_service.py +126 -0
- package/app/backend/app/services/workspace_service.py +465 -0
- package/app/backend/app/services/worktree_file_service.py +92 -0
- package/app/backend/app/services/worktree_validator.py +213 -0
- package/app/backend/app/sqlite_kv.py +278 -0
- package/app/backend/app/state_machine.py +128 -0
- package/app/backend/app/templates/__init__.py +5 -0
- package/app/backend/app/templates/registry.py +243 -0
- package/app/backend/app/utils/__init__.py +5 -0
- package/app/backend/app/utils/artifact_reader.py +87 -0
- package/app/backend/app/utils/circuit_breaker.py +229 -0
- package/app/backend/app/utils/db_retry.py +136 -0
- package/app/backend/app/utils/ignored_fields.py +123 -0
- package/app/backend/app/utils/validators.py +54 -0
- package/app/backend/app/websocket/__init__.py +5 -0
- package/app/backend/app/websocket/manager.py +179 -0
- package/app/backend/app/websocket/state_tracker.py +113 -0
- package/app/backend/app/worker.py +3190 -0
- package/app/backend/calculator_tickets.json +40 -0
- package/app/backend/canary_tests.sh +591 -0
- package/app/backend/celerybeat-schedule +0 -0
- package/app/backend/celerybeat-schedule-shm +0 -0
- package/app/backend/celerybeat-schedule-wal +0 -0
- package/app/backend/logs/.gitkeep +3 -0
- package/app/backend/multiplication_division_implementation_tickets.json +55 -0
- package/app/backend/multiplication_division_tickets.json +42 -0
- package/app/backend/pyproject.toml +45 -0
- package/app/backend/requirements-dev.txt +8 -0
- package/app/backend/requirements.txt +20 -0
- package/app/backend/run.sh +30 -0
- package/app/backend/run_with_logs.sh +10 -0
- package/app/backend/scientific_calculator_tickets.json +40 -0
- package/app/backend/scripts/extract_openapi.py +21 -0
- package/app/backend/scripts/seed_demo.py +187 -0
- package/app/backend/setup_demo_review.py +302 -0
- package/app/backend/test_actual_parse.py +41 -0
- package/app/backend/test_agent_streaming.py +61 -0
- package/app/backend/test_parse.py +51 -0
- package/app/backend/test_streaming.py +51 -0
- package/app/backend/test_subprocess_streaming.py +50 -0
- package/app/backend/tests/__init__.py +1 -0
- package/app/backend/tests/conftest.py +46 -0
- package/app/backend/tests/test_auth.py +341 -0
- package/app/backend/tests/test_autonomy_service.py +391 -0
- package/app/backend/tests/test_cleanup_service_safety.py +417 -0
- package/app/backend/tests/test_middleware.py +279 -0
- package/app/backend/tests/test_planner_providers.py +290 -0
- package/app/backend/tests/test_planner_unblock.py +183 -0
- package/app/backend/tests/test_revision_invariants.py +618 -0
- package/app/backend/tests/test_sqlite_kv.py +290 -0
- package/app/backend/tests/test_sqlite_worker.py +353 -0
- package/app/backend/tests/test_task_dispatch.py +100 -0
- package/app/backend/tests/test_ticket_validation.py +304 -0
- package/app/backend/tests/test_udar_agent.py +693 -0
- package/app/backend/tests/test_webhook_service.py +184 -0
- package/app/backend/tickets_output.json +59 -0
- package/app/backend/user_management_tickets.json +50 -0
- package/app/backend/uvicorn.log +0 -0
- package/app/draft.yaml +313 -0
- package/app/frontend/dist/assets/index-LcjCczu5.js +155 -0
- package/app/frontend/dist/assets/index-_FP_279e.css +1 -0
- package/app/frontend/dist/index.html +14 -0
- package/app/frontend/dist/vite.svg +1 -0
- package/app/frontend/package.json +101 -0
- package/bin/cli.js +527 -0
- package/package.json +37 -0
|
@@ -0,0 +1,2003 @@
|
|
|
1
|
+
"""API router for Ticket endpoints."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
|
8
|
+
from sqlalchemy import func, or_, select
|
|
9
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
10
|
+
from sqlalchemy.orm import selectinload
|
|
11
|
+
|
|
12
|
+
from app.database import get_db
|
|
13
|
+
from app.models.evidence import Evidence
|
|
14
|
+
from app.models.job import JobKind
|
|
15
|
+
from app.models.ticket import Ticket
|
|
16
|
+
from app.models.ticket_event import TicketEvent
|
|
17
|
+
from app.schemas.common import PaginatedResponse
|
|
18
|
+
from app.schemas.evidence import EvidenceListResponse, EvidenceResponse
|
|
19
|
+
from app.schemas.job import JobCreateResponse, JobListResponse, JobResponse
|
|
20
|
+
from app.schemas.planner import (
|
|
21
|
+
BulkPriorityUpdateRequest,
|
|
22
|
+
BulkPriorityUpdateResponse,
|
|
23
|
+
BulkPriorityUpdateResult,
|
|
24
|
+
bucket_to_priority,
|
|
25
|
+
priority_to_bucket,
|
|
26
|
+
)
|
|
27
|
+
from app.schemas.ticket import (
|
|
28
|
+
BulkAcceptRequest,
|
|
29
|
+
BulkAcceptResponse,
|
|
30
|
+
BulkAcceptResult,
|
|
31
|
+
BulkTransitionRequest,
|
|
32
|
+
BulkTransitionResponse,
|
|
33
|
+
BulkTransitionResult,
|
|
34
|
+
TicketCreate,
|
|
35
|
+
TicketDetailResponse,
|
|
36
|
+
TicketReorderRequest,
|
|
37
|
+
TicketResponse,
|
|
38
|
+
TicketTransition,
|
|
39
|
+
TicketUpdate,
|
|
40
|
+
)
|
|
41
|
+
from app.schemas.ticket_event import TicketEventListResponse, TicketEventResponse
|
|
42
|
+
from app.services.job_service import JobService
|
|
43
|
+
from app.services.ticket_service import TicketService
|
|
44
|
+
from app.state_machine import ActorType, EventType, TicketState, validate_transition
|
|
45
|
+
from app.websocket.manager import manager as connection_manager
|
|
46
|
+
|
|
47
|
+
logger = logging.getLogger(__name__)
|
|
48
|
+
|
|
49
|
+
router = APIRouter(prefix="/tickets", tags=["tickets"])
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def _broadcast_board_invalidate(
|
|
53
|
+
board_id: str | None, reason: str = "ticket_mutation"
|
|
54
|
+
) -> None:
|
|
55
|
+
"""Broadcast a board invalidation message via WebSocket if board_id is available."""
|
|
56
|
+
if board_id:
|
|
57
|
+
await connection_manager.broadcast(
|
|
58
|
+
f"board:{board_id}",
|
|
59
|
+
{"type": "invalidate", "reason": reason},
|
|
60
|
+
)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@router.post(
    "",
    response_model=TicketResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Create a new ticket",
)
async def create_ticket(
    data: TicketCreate,
    db: AsyncSession = Depends(get_db),
) -> TicketResponse:
    """Create a new ticket linked to a goal.

    Newly created tickets always begin life in the 'proposed' state.
    """
    created = await TicketService(db).create_ticket(data)
    return TicketResponse.model_validate(created)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@router.get(
    "",
    response_model=PaginatedResponse[TicketResponse],
    summary="List tickets with optional filtering and pagination",
)
async def list_tickets(
    page: int = Query(1, ge=1, description="Page number (1-based)"),
    limit: int = Query(50, ge=1, le=200, description="Items per page"),
    state: TicketState | None = Query(None, description="Filter by ticket state"),
    priority_min: int | None = Query(
        None, ge=0, le=100, description="Minimum priority"
    ),
    priority_max: int | None = Query(
        None, ge=0, le=100, description="Maximum priority"
    ),
    goal_id: str | None = Query(None, description="Filter by goal ID"),
    board_id: str | None = Query(None, description="Filter by board ID"),
    q: str | None = Query(
        None,
        min_length=1,
        max_length=200,
        description="Text search on title/description",
    ),
    db: AsyncSession = Depends(get_db),
) -> PaginatedResponse[TicketResponse]:
    """
    List tickets with optional filtering and pagination.

    **Filters:**
    - `state`: Filter by ticket state (e.g., planned, executing)
    - `priority_min` / `priority_max`: Filter by priority range
    - `goal_id`: Filter by parent goal
    - `board_id`: Filter by board
    - `q`: Full-text search on title and description

    **Pagination:**
    - `page`: Page number (1-based, default 1)
    - `limit`: Items per page (default 50, max 200)
    """
    # Collect WHERE conditions once so the row query and the count query are
    # guaranteed to filter identically.
    conditions = []
    if state is not None:
        conditions.append(Ticket.state == state.value)
    if priority_min is not None:
        conditions.append(Ticket.priority >= priority_min)
    if priority_max is not None:
        conditions.append(Ticket.priority <= priority_max)
    if goal_id is not None:
        conditions.append(Ticket.goal_id == goal_id)
    if board_id is not None:
        conditions.append(Ticket.board_id == board_id)
    if q is not None:
        pattern = f"%{q}%"
        conditions.append(
            or_(
                Ticket.title.ilike(pattern),
                Ticket.description.ilike(pattern),
            )
        )

    query = select(Ticket).options(selectinload(Ticket.blocked_by))
    count_query = select(func.count(Ticket.id))
    for condition in conditions:
        query = query.where(condition)
        count_query = count_query.where(condition)

    # Total before pagination is applied.
    total = (await db.execute(count_query)).scalar() or 0

    # Highest priority first (NULLs last), then newest first.
    query = (
        query.order_by(
            Ticket.priority.desc().nulls_last(),
            Ticket.created_at.desc(),
        )
        .offset((page - 1) * limit)
        .limit(limit)
    )
    tickets = (await db.execute(query)).scalars().all()

    items: list[TicketResponse] = []
    for ticket in tickets:
        payload = TicketResponse.model_validate(ticket).model_dump()
        # Surface the blocking ticket's title when the relationship is loaded.
        if ticket.blocked_by_ticket_id and ticket.blocked_by:
            payload["blocked_by_ticket_title"] = ticket.blocked_by.title
        items.append(TicketResponse(**payload))

    return PaginatedResponse[TicketResponse](
        items=items,
        total=total,
        page=page,
        limit=limit,
    )
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
@router.get(
    "/{ticket_id}",
    response_model=TicketDetailResponse,
    summary="Get a ticket by ID",
)
async def get_ticket(
    ticket_id: str,
    db: AsyncSession = Depends(get_db),
) -> TicketDetailResponse:
    """Get a ticket by its ID with full context.

    Includes parent-goal info, blocking-dependency status, and display
    labels for state and priority.
    """
    service = TicketService(db)
    ticket = await service.get_ticket_by_id(ticket_id)

    # A ticket counts as blocked when its dependency exists and is not DONE.
    # If the relationship failed to load, err on the side of "blocked".
    # Uses the module-level TicketState import; the previous function-local
    # `from app.state_machine import TicketState as TS` was redundant.
    is_blocked = False
    blocked_by_title = None
    if ticket.blocked_by_ticket_id:
        if ticket.blocked_by:
            blocked_by_title = ticket.blocked_by.title
            is_blocked = ticket.blocked_by.state != TicketState.DONE.value
        else:
            is_blocked = True  # Assume blocked if relationship not loaded

    return TicketDetailResponse(
        id=ticket.id,
        goal_id=ticket.goal_id,
        goal_title=ticket.goal.title if ticket.goal else None,
        goal_description=ticket.goal.description if ticket.goal else None,
        title=ticket.title,
        description=ticket.description,
        state=ticket.state_enum,
        state_display=TicketDetailResponse.get_state_display(ticket.state_enum),
        priority=ticket.priority,
        priority_label=TicketDetailResponse.get_priority_label(ticket.priority),
        blocked_by_ticket_id=ticket.blocked_by_ticket_id,
        blocked_by_ticket_title=blocked_by_title,
        is_blocked=is_blocked,
        created_at=ticket.created_at,
        updated_at=ticket.updated_at,
    )
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
@router.delete(
    "/{ticket_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    summary="Delete a ticket",
)
async def delete_ticket(
    ticket_id: str,
    db: AsyncSession = Depends(get_db),
) -> None:
    """
    Delete a ticket and all its associated data.

    This will cascade delete:
    - All jobs for this ticket
    - All revisions and their review comments/summaries
    - All ticket events
    - The workspace and worktree (best effort)
    - All evidence files

    **WARNING:** This action cannot be undone!
    """
    from sqlalchemy import delete as sql_delete

    service = TicketService(db)

    # Verify the ticket exists (raises if not) and capture board_id before
    # the row disappears so the invalidation broadcast still works afterwards.
    ticket = await service.get_ticket_by_id(ticket_id)
    board_id = ticket.board_id

    # Clean up workspace (best effort; deletion proceeds even if this fails).
    try:
        await service._cleanup_workspace_async(ticket_id)
    except Exception as e:
        # Use the module-level logger with lazy %-args; the previous
        # in-function `import logging` / logger rebind was redundant and
        # shadowed the module logger.
        logger.warning(
            "Failed to cleanup workspace for ticket %s: %s", ticket_id, e
        )

    # Delete the ticket row; DB-level cascades remove the related records.
    await db.execute(sql_delete(Ticket).where(Ticket.id == ticket_id))
    await db.commit()

    # Tell board subscribers their cached view is stale.
    await _broadcast_board_invalidate(board_id, reason="ticket_deleted")
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
@router.patch(
    "/{ticket_id}",
    response_model=TicketResponse,
    summary="Update a ticket",
)
async def update_ticket(
    ticket_id: str,
    data: TicketUpdate,
    db: AsyncSession = Depends(get_db),
) -> TicketResponse:
    """Update a ticket's title, description, or priority.

    Only fields explicitly present in the request body are applied, so a
    partial PATCH leaves the other fields untouched.
    """
    service = TicketService(db)
    ticket = await service.get_ticket_by_id(ticket_id)

    # Apply only the fields the client actually sent.
    sent_fields = data.model_fields_set
    for field_name in ("title", "description", "priority"):
        if field_name in sent_fields:
            setattr(ticket, field_name, getattr(data, field_name))

    await db.flush()
    await db.refresh(ticket)
    await db.commit()

    # Tell board subscribers their cached view is stale.
    await _broadcast_board_invalidate(ticket.board_id, reason="ticket_updated")

    return TicketResponse(
        id=ticket.id,
        goal_id=ticket.goal_id,
        title=ticket.title,
        description=ticket.description,
        state=ticket.state,
        priority=ticket.priority,
        blocked_by_ticket_id=ticket.blocked_by_ticket_id,
        created_at=ticket.created_at,
        updated_at=ticket.updated_at,
    )
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
@router.post(
    "/accept",
    response_model=BulkAcceptResponse,
    summary="Bulk accept proposed tickets",
)
async def bulk_accept_tickets(
    data: BulkAcceptRequest,
    db: AsyncSession = Depends(get_db),
) -> BulkAcceptResponse:
    """
    Bulk accept proposed tickets, transitioning them from 'proposed' to 'planned'.

    Validation rules:
    - All tickets must exist
    - All tickets must be in 'proposed' state
    - If goal_id is provided, all tickets must belong to that goal

    The operation is atomic: if any ticket fails validation, none are accepted.
    This prevents partial acceptance which causes UI confusion.

    If queue_first=true:
    - The FIRST ticket in the request order (ticket_ids[0]) will be queued
    - Request order is deterministic and matches UI selection order
    - Remaining tickets stay in 'planned' state (not auto-queued)
    - Job is created AFTER all transitions are committed
    - Returns queued_job_id and queued_ticket_id for traceability
    """
    service = TicketService(db)
    job_service = JobService(db)
    rejected: list[BulkAcceptResult] = []

    # Phase 1: Pre-validation - fetch all tickets and validate.
    # Preserve request order by using a list, not a dict.
    # (Ticket is already imported at module level; no local re-import needed.)
    tickets_to_accept: list[Ticket] = []

    for ticket_id in data.ticket_ids:
        try:
            ticket = await service.get_ticket_by_id(ticket_id)
        except Exception:
            rejected.append(
                BulkAcceptResult(
                    ticket_id=ticket_id,
                    success=False,
                    error="Ticket not found",
                )
            )
            continue

        # Validate state
        if ticket.state != TicketState.PROPOSED.value:
            rejected.append(
                BulkAcceptResult(
                    ticket_id=ticket_id,
                    success=False,
                    error=f"Ticket is in '{ticket.state}' state, not 'proposed'",
                )
            )
            continue

        # Validate goal ownership if goal_id provided
        if data.goal_id and ticket.goal_id != data.goal_id:
            rejected.append(
                BulkAcceptResult(
                    ticket_id=ticket_id,
                    success=False,
                    error=f"Ticket belongs to goal '{ticket.goal_id}', not '{data.goal_id}'",
                )
            )
            continue

        tickets_to_accept.append(ticket)

    # If any tickets were rejected, don't accept any (atomic operation).
    if rejected:
        return BulkAcceptResponse(
            accepted_ids=[],
            rejected=rejected,
            accepted_count=0,
            failed_count=len(rejected),
            queued_job_id=None,
            queued_ticket_id=None,
        )

    # Phase 2: Accept all validated tickets within the transaction.
    # Pre-validation should make failures here impossible; if one happens
    # anyway, let the exception propagate so the session rolls back and no
    # tickets are partially accepted.  (The previous code appended to
    # `rejected` before re-raising, but that entry could never reach a
    # response, so it has been dropped.)
    accepted_ids: list[str] = []
    for ticket in tickets_to_accept:
        await service.transition_ticket(
            ticket_id=ticket.id,
            to_state=TicketState.PLANNED,
            actor_type=data.actor_type,
            actor_id=data.actor_id,
            reason=data.reason,
        )
        accepted_ids.append(ticket.id)

    # Commit transitions before queueing the job so the worker sees the
    # updated ticket state.
    await db.commit()

    # Phase 3: Queue first ticket if requested (after commit).
    queued_job_id: str | None = None
    queued_ticket_id: str | None = None

    if data.queue_first and accepted_ids:
        # Use first ticket in request order (deterministic).
        first_ticket_id = accepted_ids[0]
        try:
            job = await job_service.create_job(first_ticket_id, JobKind.EXECUTE)
            await db.commit()  # Commit job creation
            queued_job_id = job.id
            queued_ticket_id = first_ticket_id
        except Exception as e:
            # Don't fail the whole operation if queueing fails; the tickets
            # are already accepted at this point.  Use the module-level
            # logger (the previous in-function `import logging` was
            # redundant) with lazy %-args.
            logger.warning(
                "Failed to queue job for ticket %s: %s", first_ticket_id, e
            )

    return BulkAcceptResponse(
        accepted_ids=accepted_ids,
        rejected=rejected,
        accepted_count=len(accepted_ids),
        failed_count=len(rejected),
        queued_job_id=queued_job_id,
        queued_ticket_id=queued_ticket_id,
    )
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
@router.post(
|
|
462
|
+
"/bulk-transition",
|
|
463
|
+
response_model=BulkTransitionResponse,
|
|
464
|
+
summary="Bulk transition multiple tickets to a new state",
|
|
465
|
+
)
|
|
466
|
+
async def bulk_transition_tickets(
|
|
467
|
+
data: BulkTransitionRequest,
|
|
468
|
+
db: AsyncSession = Depends(get_db),
|
|
469
|
+
) -> BulkTransitionResponse:
|
|
470
|
+
"""
|
|
471
|
+
Transition multiple tickets to a new state in a single request.
|
|
472
|
+
|
|
473
|
+
Each ticket is validated independently against the state machine.
|
|
474
|
+
Tickets that fail validation are skipped (partial success is allowed).
|
|
475
|
+
|
|
476
|
+
**Use cases:**
|
|
477
|
+
- Bulk abandon tickets
|
|
478
|
+
- Bulk move tickets back to planned
|
|
479
|
+
- Bulk mark tickets as done
|
|
480
|
+
"""
|
|
481
|
+
service = TicketService(db)
|
|
482
|
+
results: list[BulkTransitionResult] = []
|
|
483
|
+
transitioned_count = 0
|
|
484
|
+
failed_count = 0
|
|
485
|
+
|
|
486
|
+
for ticket_id in data.ticket_ids:
|
|
487
|
+
try:
|
|
488
|
+
ticket = await service.get_ticket_by_id(ticket_id)
|
|
489
|
+
from_state = TicketState(ticket.state)
|
|
490
|
+
|
|
491
|
+
# Validate transition
|
|
492
|
+
if not validate_transition(from_state, data.target_state):
|
|
493
|
+
results.append(
|
|
494
|
+
BulkTransitionResult(
|
|
495
|
+
ticket_id=ticket_id,
|
|
496
|
+
success=False,
|
|
497
|
+
error=(
|
|
498
|
+
f"Invalid transition from '{from_state.value}' "
|
|
499
|
+
f"to '{data.target_state.value}'"
|
|
500
|
+
),
|
|
501
|
+
from_state=from_state.value,
|
|
502
|
+
to_state=data.target_state.value,
|
|
503
|
+
)
|
|
504
|
+
)
|
|
505
|
+
failed_count += 1
|
|
506
|
+
continue
|
|
507
|
+
|
|
508
|
+
await service.transition_ticket(
|
|
509
|
+
ticket_id=ticket_id,
|
|
510
|
+
to_state=data.target_state,
|
|
511
|
+
actor_type=data.actor_type,
|
|
512
|
+
actor_id=data.actor_id,
|
|
513
|
+
reason=data.reason,
|
|
514
|
+
)
|
|
515
|
+
results.append(
|
|
516
|
+
BulkTransitionResult(
|
|
517
|
+
ticket_id=ticket_id,
|
|
518
|
+
success=True,
|
|
519
|
+
from_state=from_state.value,
|
|
520
|
+
to_state=data.target_state.value,
|
|
521
|
+
)
|
|
522
|
+
)
|
|
523
|
+
transitioned_count += 1
|
|
524
|
+
except Exception as e:
|
|
525
|
+
results.append(
|
|
526
|
+
BulkTransitionResult(
|
|
527
|
+
ticket_id=ticket_id,
|
|
528
|
+
success=False,
|
|
529
|
+
error=str(e),
|
|
530
|
+
)
|
|
531
|
+
)
|
|
532
|
+
failed_count += 1
|
|
533
|
+
|
|
534
|
+
# Commit all successful transitions
|
|
535
|
+
if transitioned_count > 0:
|
|
536
|
+
await db.commit()
|
|
537
|
+
|
|
538
|
+
return BulkTransitionResponse(
|
|
539
|
+
results=results,
|
|
540
|
+
transitioned_count=transitioned_count,
|
|
541
|
+
failed_count=failed_count,
|
|
542
|
+
)
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
@router.patch(
|
|
546
|
+
"/reorder",
|
|
547
|
+
response_model=TicketResponse,
|
|
548
|
+
summary="Reorder a ticket within a state column",
|
|
549
|
+
)
|
|
550
|
+
async def reorder_ticket(
|
|
551
|
+
data: TicketReorderRequest,
|
|
552
|
+
db: AsyncSession = Depends(get_db),
|
|
553
|
+
) -> TicketResponse:
|
|
554
|
+
"""
|
|
555
|
+
Reorder a ticket within its state column by updating sort_order.
|
|
556
|
+
|
|
557
|
+
Moves the ticket to `new_index` (0-based) within the specified
|
|
558
|
+
`column_state`. Other tickets in the column are re-indexed to
|
|
559
|
+
maintain a contiguous order.
|
|
560
|
+
|
|
561
|
+
**Note:** The ticket must already be in the specified column_state.
|
|
562
|
+
"""
|
|
563
|
+
# Verify the ticket exists and is in the correct state
|
|
564
|
+
service = TicketService(db)
|
|
565
|
+
ticket = await service.get_ticket_by_id(data.ticket_id)
|
|
566
|
+
|
|
567
|
+
if ticket.state != data.column_state.value:
|
|
568
|
+
raise HTTPException(
|
|
569
|
+
status_code=400,
|
|
570
|
+
detail=(
|
|
571
|
+
f"Ticket is in '{ticket.state}' state, not '{data.column_state.value}'"
|
|
572
|
+
),
|
|
573
|
+
)
|
|
574
|
+
|
|
575
|
+
# Get all tickets in the same column, ordered by sort_order
|
|
576
|
+
column_query = (
|
|
577
|
+
select(Ticket)
|
|
578
|
+
.where(Ticket.state == data.column_state.value)
|
|
579
|
+
.order_by(
|
|
580
|
+
Ticket.sort_order.asc().nulls_last(),
|
|
581
|
+
Ticket.priority.desc().nulls_last(),
|
|
582
|
+
Ticket.created_at.desc(),
|
|
583
|
+
)
|
|
584
|
+
)
|
|
585
|
+
# Scope to same board if ticket has board_id
|
|
586
|
+
if ticket.board_id:
|
|
587
|
+
column_query = column_query.where(Ticket.board_id == ticket.board_id)
|
|
588
|
+
|
|
589
|
+
result = await db.execute(column_query)
|
|
590
|
+
column_tickets = list(result.scalars().all())
|
|
591
|
+
|
|
592
|
+
# Remove the target ticket from the list
|
|
593
|
+
column_tickets = [t for t in column_tickets if t.id != data.ticket_id]
|
|
594
|
+
|
|
595
|
+
# Clamp new_index to valid range
|
|
596
|
+
new_index = min(data.new_index, len(column_tickets))
|
|
597
|
+
|
|
598
|
+
# Insert at new position
|
|
599
|
+
column_tickets.insert(new_index, ticket)
|
|
600
|
+
|
|
601
|
+
# Re-assign sort_order for all tickets in the column
|
|
602
|
+
for idx, t in enumerate(column_tickets):
|
|
603
|
+
t.sort_order = idx
|
|
604
|
+
|
|
605
|
+
await db.flush()
|
|
606
|
+
await db.refresh(ticket)
|
|
607
|
+
await db.commit()
|
|
608
|
+
|
|
609
|
+
# Broadcast board invalidation
|
|
610
|
+
await _broadcast_board_invalidate(ticket.board_id, reason="ticket_reordered")
|
|
611
|
+
|
|
612
|
+
return TicketResponse.model_validate(ticket)
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
@router.post(
|
|
616
|
+
"/{ticket_id}/transition",
|
|
617
|
+
response_model=TicketResponse,
|
|
618
|
+
summary="Transition a ticket to a new state",
|
|
619
|
+
)
|
|
620
|
+
async def transition_ticket(
|
|
621
|
+
ticket_id: str,
|
|
622
|
+
data: TicketTransition,
|
|
623
|
+
db: AsyncSession = Depends(get_db),
|
|
624
|
+
) -> TicketResponse:
|
|
625
|
+
"""
|
|
626
|
+
Transition a ticket to a new state.
|
|
627
|
+
|
|
628
|
+
The transition must be valid according to the state machine rules.
|
|
629
|
+
An event will be recorded for this transition.
|
|
630
|
+
"""
|
|
631
|
+
service = TicketService(db)
|
|
632
|
+
ticket = await service.transition_ticket(
|
|
633
|
+
ticket_id=ticket_id,
|
|
634
|
+
to_state=data.to_state,
|
|
635
|
+
actor_type=data.actor_type,
|
|
636
|
+
actor_id=data.actor_id,
|
|
637
|
+
reason=data.reason,
|
|
638
|
+
)
|
|
639
|
+
|
|
640
|
+
# Broadcast board invalidation
|
|
641
|
+
await _broadcast_board_invalidate(ticket.board_id, reason="ticket_transition")
|
|
642
|
+
|
|
643
|
+
return TicketResponse.model_validate(ticket)
|
|
644
|
+
|
|
645
|
+
|
|
646
|
+
@router.get(
|
|
647
|
+
"/{ticket_id}/events",
|
|
648
|
+
response_model=TicketEventListResponse,
|
|
649
|
+
summary="Get all events for a ticket",
|
|
650
|
+
)
|
|
651
|
+
async def get_ticket_events(
|
|
652
|
+
ticket_id: str,
|
|
653
|
+
db: AsyncSession = Depends(get_db),
|
|
654
|
+
) -> TicketEventListResponse:
|
|
655
|
+
"""Get the event history for a ticket."""
|
|
656
|
+
service = TicketService(db)
|
|
657
|
+
events = await service.get_ticket_events(ticket_id)
|
|
658
|
+
|
|
659
|
+
# Transform events to response schema
|
|
660
|
+
event_responses = []
|
|
661
|
+
for event in events:
|
|
662
|
+
event_responses.append(
|
|
663
|
+
TicketEventResponse(
|
|
664
|
+
id=event.id,
|
|
665
|
+
ticket_id=event.ticket_id,
|
|
666
|
+
event_type=EventType(event.event_type),
|
|
667
|
+
from_state=event.from_state,
|
|
668
|
+
to_state=event.to_state,
|
|
669
|
+
actor_type=event.actor_type,
|
|
670
|
+
actor_id=event.actor_id,
|
|
671
|
+
reason=event.reason,
|
|
672
|
+
payload=event.get_payload(),
|
|
673
|
+
created_at=event.created_at,
|
|
674
|
+
)
|
|
675
|
+
)
|
|
676
|
+
|
|
677
|
+
return TicketEventListResponse(
|
|
678
|
+
events=event_responses,
|
|
679
|
+
total=len(event_responses),
|
|
680
|
+
)
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
@router.post(
|
|
684
|
+
"/{ticket_id}/execute",
|
|
685
|
+
response_model=JobCreateResponse,
|
|
686
|
+
status_code=status.HTTP_201_CREATED,
|
|
687
|
+
summary="Execute a single ticket (run it now)",
|
|
688
|
+
)
|
|
689
|
+
async def execute_ticket(
|
|
690
|
+
ticket_id: str,
|
|
691
|
+
executor_profile: str | None = None,
|
|
692
|
+
db: AsyncSession = Depends(get_db),
|
|
693
|
+
) -> JobCreateResponse:
|
|
694
|
+
"""
|
|
695
|
+
Execute a single ticket immediately.
|
|
696
|
+
|
|
697
|
+
**Use this to run a specific ticket without using autopilot.**
|
|
698
|
+
|
|
699
|
+
Valid ticket states for execution:
|
|
700
|
+
- PLANNED: Normal execution
|
|
701
|
+
- NEEDS_HUMAN: Re-run after human intervention
|
|
702
|
+
- DONE: Re-run if changes were requested on revision
|
|
703
|
+
|
|
704
|
+
The ticket will transition to EXECUTING when the job starts,
|
|
705
|
+
then to VERIFYING or BLOCKED based on the outcome.
|
|
706
|
+
|
|
707
|
+
Pass `executor_profile` query param to use a named profile from
|
|
708
|
+
draft.yaml (e.g., `?executor_profile=fast`).
|
|
709
|
+
|
|
710
|
+
For automated execution of all planned tickets, use `/planner/start`.
|
|
711
|
+
"""
|
|
712
|
+
from app.state_machine import validate_transition
|
|
713
|
+
|
|
714
|
+
# Validate executor profile if specified
|
|
715
|
+
if executor_profile:
|
|
716
|
+
from app.services.config_service import ConfigService
|
|
717
|
+
|
|
718
|
+
config_service = ConfigService()
|
|
719
|
+
profile = config_service.get_executor_profile(executor_profile)
|
|
720
|
+
if not profile:
|
|
721
|
+
raise HTTPException(
|
|
722
|
+
status_code=400,
|
|
723
|
+
detail=f"Unknown executor profile: '{executor_profile}'. "
|
|
724
|
+
f"Available: {list(config_service.get_executor_profiles().keys())}",
|
|
725
|
+
)
|
|
726
|
+
|
|
727
|
+
ticket_service = TicketService(db)
|
|
728
|
+
ticket = await ticket_service.get_ticket_by_id(ticket_id)
|
|
729
|
+
|
|
730
|
+
# Validate ticket can transition to EXECUTING
|
|
731
|
+
current_state = ticket.state_enum
|
|
732
|
+
if not validate_transition(current_state, TicketState.EXECUTING):
|
|
733
|
+
raise HTTPException(
|
|
734
|
+
status_code=400,
|
|
735
|
+
detail=f"Cannot execute ticket in '{current_state.value}' state. "
|
|
736
|
+
f"Ticket must be in PLANNED, NEEDS_HUMAN, or DONE state.",
|
|
737
|
+
)
|
|
738
|
+
|
|
739
|
+
# Check dependency: ticket cannot execute if blocked by an incomplete ticket
|
|
740
|
+
if ticket.is_blocked_by_dependency:
|
|
741
|
+
blocker_title = ticket.blocked_by.title if ticket.blocked_by else "unknown"
|
|
742
|
+
raise HTTPException(
|
|
743
|
+
status_code=409,
|
|
744
|
+
detail=f"Cannot execute: ticket is blocked by '{blocker_title}' "
|
|
745
|
+
f"(id: {ticket.blocked_by_ticket_id}) which is not yet done.",
|
|
746
|
+
)
|
|
747
|
+
|
|
748
|
+
# Transition ticket to EXECUTING immediately so the board reflects it
|
|
749
|
+
# before the Celery worker picks up the job. This prevents the "bounce"
|
|
750
|
+
# where optimistic UI update snaps back on the next board refresh.
|
|
751
|
+
if current_state != TicketState.EXECUTING:
|
|
752
|
+
await ticket_service.transition_ticket(
|
|
753
|
+
ticket_id=ticket_id,
|
|
754
|
+
to_state=TicketState.EXECUTING,
|
|
755
|
+
actor_type=ActorType.HUMAN,
|
|
756
|
+
reason="Execution requested",
|
|
757
|
+
auto_verify=False,
|
|
758
|
+
skip_cleanup=False,
|
|
759
|
+
)
|
|
760
|
+
|
|
761
|
+
job_service = JobService(db)
|
|
762
|
+
job = await job_service.create_job(ticket_id, JobKind.EXECUTE)
|
|
763
|
+
|
|
764
|
+
return JobCreateResponse(
|
|
765
|
+
id=job.id,
|
|
766
|
+
ticket_id=job.ticket_id,
|
|
767
|
+
kind=job.kind_enum,
|
|
768
|
+
status=job.status_enum,
|
|
769
|
+
created_at=job.created_at,
|
|
770
|
+
started_at=job.started_at,
|
|
771
|
+
finished_at=job.finished_at,
|
|
772
|
+
exit_code=job.exit_code,
|
|
773
|
+
log_path=job.log_path,
|
|
774
|
+
celery_task_id=job.celery_task_id,
|
|
775
|
+
)
|
|
776
|
+
|
|
777
|
+
|
|
778
|
+
@router.post(
|
|
779
|
+
"/{ticket_id}/run",
|
|
780
|
+
response_model=JobCreateResponse,
|
|
781
|
+
status_code=status.HTTP_201_CREATED,
|
|
782
|
+
summary="Enqueue an execute job for a ticket (alias for /execute)",
|
|
783
|
+
deprecated=True,
|
|
784
|
+
)
|
|
785
|
+
async def run_ticket(
|
|
786
|
+
ticket_id: str,
|
|
787
|
+
db: AsyncSession = Depends(get_db),
|
|
788
|
+
) -> JobCreateResponse:
|
|
789
|
+
"""
|
|
790
|
+
**Deprecated: Use `/tickets/{ticket_id}/execute` instead.**
|
|
791
|
+
|
|
792
|
+
Enqueue an execute job for a ticket.
|
|
793
|
+
"""
|
|
794
|
+
return await execute_ticket(ticket_id, db=db)
|
|
795
|
+
|
|
796
|
+
|
|
797
|
+
@router.post(
|
|
798
|
+
"/{ticket_id}/verify",
|
|
799
|
+
response_model=JobCreateResponse,
|
|
800
|
+
status_code=status.HTTP_201_CREATED,
|
|
801
|
+
summary="Enqueue a verify job for a ticket",
|
|
802
|
+
)
|
|
803
|
+
async def verify_ticket(
|
|
804
|
+
ticket_id: str,
|
|
805
|
+
db: AsyncSession = Depends(get_db),
|
|
806
|
+
) -> JobCreateResponse:
|
|
807
|
+
"""
|
|
808
|
+
Enqueue a verify job for a ticket.
|
|
809
|
+
|
|
810
|
+
Creates a new Job record with kind='verify' and status='queued',
|
|
811
|
+
then dispatches a Celery task to verify the ticket.
|
|
812
|
+
"""
|
|
813
|
+
service = JobService(db)
|
|
814
|
+
job = await service.create_job(ticket_id, JobKind.VERIFY)
|
|
815
|
+
|
|
816
|
+
return JobCreateResponse(
|
|
817
|
+
id=job.id,
|
|
818
|
+
ticket_id=job.ticket_id,
|
|
819
|
+
kind=job.kind_enum,
|
|
820
|
+
status=job.status_enum,
|
|
821
|
+
created_at=job.created_at,
|
|
822
|
+
started_at=job.started_at,
|
|
823
|
+
finished_at=job.finished_at,
|
|
824
|
+
exit_code=job.exit_code,
|
|
825
|
+
log_path=job.log_path,
|
|
826
|
+
celery_task_id=job.celery_task_id,
|
|
827
|
+
)
|
|
828
|
+
|
|
829
|
+
|
|
830
|
+
@router.post(
|
|
831
|
+
"/{ticket_id}/resume",
|
|
832
|
+
response_model=JobCreateResponse,
|
|
833
|
+
status_code=status.HTTP_201_CREATED,
|
|
834
|
+
summary="Resume an interactive ticket after human completion",
|
|
835
|
+
)
|
|
836
|
+
async def resume_ticket(
|
|
837
|
+
ticket_id: str,
|
|
838
|
+
db: AsyncSession = Depends(get_db),
|
|
839
|
+
) -> JobCreateResponse:
|
|
840
|
+
"""
|
|
841
|
+
Resume an interactive ticket after human completion.
|
|
842
|
+
|
|
843
|
+
Use this endpoint when:
|
|
844
|
+
1. A ticket was transitioned to 'needs_human' by an interactive executor (Cursor)
|
|
845
|
+
2. The human has made their changes in the worktree
|
|
846
|
+
3. The human wants to continue the workflow
|
|
847
|
+
|
|
848
|
+
This endpoint:
|
|
849
|
+
1. Creates a 'resume' job that captures the git diff as evidence
|
|
850
|
+
2. Transitions the ticket to 'verifying' state
|
|
851
|
+
3. Queues a verification job
|
|
852
|
+
|
|
853
|
+
The resume job will fail if the ticket is not in 'needs_human' state.
|
|
854
|
+
"""
|
|
855
|
+
service = JobService(db)
|
|
856
|
+
job = await service.create_job(ticket_id, JobKind.RESUME)
|
|
857
|
+
|
|
858
|
+
return JobCreateResponse(
|
|
859
|
+
id=job.id,
|
|
860
|
+
ticket_id=job.ticket_id,
|
|
861
|
+
kind=job.kind_enum,
|
|
862
|
+
status=job.status_enum,
|
|
863
|
+
created_at=job.created_at,
|
|
864
|
+
started_at=job.started_at,
|
|
865
|
+
finished_at=job.finished_at,
|
|
866
|
+
exit_code=job.exit_code,
|
|
867
|
+
log_path=job.log_path,
|
|
868
|
+
celery_task_id=job.celery_task_id,
|
|
869
|
+
)
|
|
870
|
+
|
|
871
|
+
|
|
872
|
+
@router.get(
|
|
873
|
+
"/{ticket_id}/jobs",
|
|
874
|
+
response_model=JobListResponse,
|
|
875
|
+
summary="Get all jobs for a ticket",
|
|
876
|
+
)
|
|
877
|
+
async def get_ticket_jobs(
|
|
878
|
+
ticket_id: str,
|
|
879
|
+
db: AsyncSession = Depends(get_db),
|
|
880
|
+
) -> JobListResponse:
|
|
881
|
+
"""Get all jobs associated with a ticket, ordered by creation time descending."""
|
|
882
|
+
service = JobService(db)
|
|
883
|
+
jobs = await service.get_jobs_for_ticket(ticket_id)
|
|
884
|
+
|
|
885
|
+
job_responses = [
|
|
886
|
+
JobResponse(
|
|
887
|
+
id=job.id,
|
|
888
|
+
ticket_id=job.ticket_id,
|
|
889
|
+
kind=job.kind_enum,
|
|
890
|
+
status=job.status_enum,
|
|
891
|
+
created_at=job.created_at,
|
|
892
|
+
started_at=job.started_at,
|
|
893
|
+
finished_at=job.finished_at,
|
|
894
|
+
exit_code=job.exit_code,
|
|
895
|
+
log_path=job.log_path,
|
|
896
|
+
)
|
|
897
|
+
for job in jobs
|
|
898
|
+
]
|
|
899
|
+
|
|
900
|
+
return JobListResponse(
|
|
901
|
+
jobs=job_responses,
|
|
902
|
+
total=len(job_responses),
|
|
903
|
+
)
|
|
904
|
+
|
|
905
|
+
|
|
906
|
+
@router.get(
|
|
907
|
+
"/{ticket_id}/evidence",
|
|
908
|
+
response_model=EvidenceListResponse,
|
|
909
|
+
summary="Get all verification evidence for a ticket",
|
|
910
|
+
)
|
|
911
|
+
async def get_ticket_evidence(
|
|
912
|
+
ticket_id: str,
|
|
913
|
+
db: AsyncSession = Depends(get_db),
|
|
914
|
+
) -> EvidenceListResponse:
|
|
915
|
+
"""
|
|
916
|
+
Get all verification evidence for a ticket.
|
|
917
|
+
|
|
918
|
+
Returns evidence records from all verification jobs, ordered by creation time descending.
|
|
919
|
+
"""
|
|
920
|
+
# First verify the ticket exists
|
|
921
|
+
service = TicketService(db)
|
|
922
|
+
await service.get_ticket_by_id(ticket_id)
|
|
923
|
+
|
|
924
|
+
# Get all evidence for the ticket
|
|
925
|
+
result = await db.execute(
|
|
926
|
+
select(Evidence)
|
|
927
|
+
.where(Evidence.ticket_id == ticket_id)
|
|
928
|
+
.order_by(Evidence.created_at.desc())
|
|
929
|
+
)
|
|
930
|
+
evidence_list = list(result.scalars().all())
|
|
931
|
+
|
|
932
|
+
evidence_responses = [
|
|
933
|
+
EvidenceResponse(
|
|
934
|
+
id=e.id,
|
|
935
|
+
ticket_id=e.ticket_id,
|
|
936
|
+
job_id=e.job_id,
|
|
937
|
+
kind=e.kind_enum,
|
|
938
|
+
command=e.command,
|
|
939
|
+
exit_code=e.exit_code,
|
|
940
|
+
stdout_path=e.stdout_path,
|
|
941
|
+
stderr_path=e.stderr_path,
|
|
942
|
+
created_at=e.created_at,
|
|
943
|
+
succeeded=e.succeeded,
|
|
944
|
+
)
|
|
945
|
+
for e in evidence_list
|
|
946
|
+
]
|
|
947
|
+
|
|
948
|
+
return EvidenceListResponse(
|
|
949
|
+
evidence=evidence_responses,
|
|
950
|
+
total=len(evidence_responses),
|
|
951
|
+
)
|
|
952
|
+
|
|
953
|
+
|
|
954
|
+
@router.get(
|
|
955
|
+
"/{ticket_id}/worktree/tree",
|
|
956
|
+
summary="Get file tree for a ticket's worktree",
|
|
957
|
+
)
|
|
958
|
+
async def get_worktree_tree(
|
|
959
|
+
ticket_id: str,
|
|
960
|
+
db: AsyncSession = Depends(get_db),
|
|
961
|
+
):
|
|
962
|
+
"""Return the directory structure of a ticket's worktree.
|
|
963
|
+
|
|
964
|
+
Used by the frontend FileTree component to browse files
|
|
965
|
+
in the ticket's isolated workspace.
|
|
966
|
+
"""
|
|
967
|
+
from pathlib import Path
|
|
968
|
+
|
|
969
|
+
from app.models.board import Board
|
|
970
|
+
from app.models.workspace import Workspace
|
|
971
|
+
from app.services.worktree_file_service import build_file_tree
|
|
972
|
+
|
|
973
|
+
# Verify ticket exists
|
|
974
|
+
service = TicketService(db)
|
|
975
|
+
ticket = await service.get_ticket_by_id(ticket_id)
|
|
976
|
+
|
|
977
|
+
# Find the workspace for this ticket
|
|
978
|
+
result = await db.execute(select(Workspace).where(Workspace.ticket_id == ticket_id))
|
|
979
|
+
workspace = result.scalar_one_or_none()
|
|
980
|
+
|
|
981
|
+
if not workspace or not workspace.worktree_path:
|
|
982
|
+
raise HTTPException(
|
|
983
|
+
status_code=404,
|
|
984
|
+
detail="No worktree found for this ticket",
|
|
985
|
+
)
|
|
986
|
+
|
|
987
|
+
worktree_path = Path(workspace.worktree_path)
|
|
988
|
+
|
|
989
|
+
# If the worktree path is relative, resolve it against the board's repo root
|
|
990
|
+
if not worktree_path.is_absolute():
|
|
991
|
+
repo_root = None
|
|
992
|
+
if ticket.board_id:
|
|
993
|
+
board_result = await db.execute(
|
|
994
|
+
select(Board).where(Board.id == ticket.board_id)
|
|
995
|
+
)
|
|
996
|
+
board = board_result.scalar_one_or_none()
|
|
997
|
+
if board and board.repo_root:
|
|
998
|
+
repo_root = Path(board.repo_root)
|
|
999
|
+
|
|
1000
|
+
if repo_root is None:
|
|
1001
|
+
git_repo_path = os.environ.get("GIT_REPO_PATH")
|
|
1002
|
+
repo_root = Path(git_repo_path) if git_repo_path else Path.cwd()
|
|
1003
|
+
|
|
1004
|
+
worktree_path = repo_root / worktree_path
|
|
1005
|
+
|
|
1006
|
+
if not worktree_path.exists():
|
|
1007
|
+
raise HTTPException(
|
|
1008
|
+
status_code=404,
|
|
1009
|
+
detail="Worktree directory does not exist on disk",
|
|
1010
|
+
)
|
|
1011
|
+
|
|
1012
|
+
tree = build_file_tree(str(worktree_path))
|
|
1013
|
+
if tree is None:
|
|
1014
|
+
raise HTTPException(
|
|
1015
|
+
status_code=404,
|
|
1016
|
+
detail="Could not build file tree",
|
|
1017
|
+
)
|
|
1018
|
+
|
|
1019
|
+
return tree
|
|
1020
|
+
|
|
1021
|
+
|
|
1022
|
+
@router.get(
|
|
1023
|
+
"/{ticket_id}/dependents",
|
|
1024
|
+
response_model=list[TicketResponse],
|
|
1025
|
+
summary="Get tickets blocked by this ticket",
|
|
1026
|
+
)
|
|
1027
|
+
async def get_ticket_dependents(
|
|
1028
|
+
ticket_id: str,
|
|
1029
|
+
db: AsyncSession = Depends(get_db),
|
|
1030
|
+
) -> list[TicketResponse]:
|
|
1031
|
+
"""
|
|
1032
|
+
Get all tickets that are blocked by this ticket (downstream dependencies).
|
|
1033
|
+
|
|
1034
|
+
Returns tickets where blocked_by_ticket_id = ticket_id, ordered by priority descending.
|
|
1035
|
+
"""
|
|
1036
|
+
# First verify the ticket exists
|
|
1037
|
+
service = TicketService(db)
|
|
1038
|
+
await service.get_ticket_by_id(ticket_id)
|
|
1039
|
+
|
|
1040
|
+
# Get all tickets blocked by this ticket
|
|
1041
|
+
result = await db.execute(
|
|
1042
|
+
select(Ticket)
|
|
1043
|
+
.where(Ticket.blocked_by_ticket_id == ticket_id)
|
|
1044
|
+
.order_by(Ticket.priority.desc().nullslast(), Ticket.created_at)
|
|
1045
|
+
)
|
|
1046
|
+
dependent_tickets = list(result.scalars().all())
|
|
1047
|
+
|
|
1048
|
+
return [TicketResponse.model_validate(t) for t in dependent_tickets]
|
|
1049
|
+
|
|
1050
|
+
|
|
1051
|
+
@router.post(
|
|
1052
|
+
"/bulk-update-priority",
|
|
1053
|
+
response_model=BulkPriorityUpdateResponse,
|
|
1054
|
+
summary="Bulk update ticket priorities",
|
|
1055
|
+
)
|
|
1056
|
+
async def bulk_update_priority(
|
|
1057
|
+
request: BulkPriorityUpdateRequest,
|
|
1058
|
+
db: AsyncSession = Depends(get_db),
|
|
1059
|
+
) -> BulkPriorityUpdateResponse:
|
|
1060
|
+
"""
|
|
1061
|
+
Bulk update priorities for multiple tickets.
|
|
1062
|
+
|
|
1063
|
+
This endpoint is designed to work with the reflection feature:
|
|
1064
|
+
1. Call `POST /goals/{id}/reflect-on-tickets` to get suggested changes
|
|
1065
|
+
2. Review the suggestions in the UI
|
|
1066
|
+
3. Call this endpoint with selected changes to apply them
|
|
1067
|
+
|
|
1068
|
+
**Authorization:**
|
|
1069
|
+
- `board_id` is REQUIRED - all operations are scoped to this board
|
|
1070
|
+
- All tickets must belong to both the specified `board_id` AND `goal_id`
|
|
1071
|
+
|
|
1072
|
+
**Safety:**
|
|
1073
|
+
- P0 assignments require `allow_p0: true` flag
|
|
1074
|
+
- Max 3 P0 assignments per request (server-enforced)
|
|
1075
|
+
- All changes are logged with PRIORITY_BULK_UPDATED event
|
|
1076
|
+
|
|
1077
|
+
**Request:**
|
|
1078
|
+
```json
|
|
1079
|
+
{
|
|
1080
|
+
"board_id": "uuid",
|
|
1081
|
+
"goal_id": "uuid",
|
|
1082
|
+
"allow_p0": true,
|
|
1083
|
+
"updates": [
|
|
1084
|
+
{"ticket_id": "uuid", "priority_bucket": "P1"},
|
|
1085
|
+
{"ticket_id": "uuid", "priority_bucket": "P0"}
|
|
1086
|
+
]
|
|
1087
|
+
}
|
|
1088
|
+
```
|
|
1089
|
+
|
|
1090
|
+
**Priority Buckets:**
|
|
1091
|
+
- P0 → 90 (Critical) - requires allow_p0=true
|
|
1092
|
+
- P1 → 70 (High)
|
|
1093
|
+
- P2 → 50 (Medium)
|
|
1094
|
+
- P3 → 30 (Low)
|
|
1095
|
+
"""
|
|
1096
|
+
from fastapi.responses import JSONResponse
|
|
1097
|
+
|
|
1098
|
+
from app.schemas.planner import MAX_P0_PER_REQUEST, PriorityBucket
|
|
1099
|
+
from app.services.board_service import BoardService
|
|
1100
|
+
|
|
1101
|
+
# AUTHORIZATION: Verify board exists
|
|
1102
|
+
board_service = BoardService(db)
|
|
1103
|
+
try:
|
|
1104
|
+
await board_service.get_board_by_id(request.board_id)
|
|
1105
|
+
except ValueError as e:
|
|
1106
|
+
raise HTTPException(status_code=404, detail=str(e))
|
|
1107
|
+
|
|
1108
|
+
# Verify goal belongs to board
|
|
1109
|
+
try:
|
|
1110
|
+
await board_service.verify_goal_in_board(request.goal_id, request.board_id)
|
|
1111
|
+
except ValueError as e:
|
|
1112
|
+
raise HTTPException(status_code=403, detail=str(e))
|
|
1113
|
+
|
|
1114
|
+
results: list[BulkPriorityUpdateResult] = []
|
|
1115
|
+
updated_count = 0
|
|
1116
|
+
failed_count = 0
|
|
1117
|
+
|
|
1118
|
+
# Identify P0 assignments
|
|
1119
|
+
p0_updates = [u for u in request.updates if u.priority_bucket == PriorityBucket.P0]
|
|
1120
|
+
p0_count = len(p0_updates)
|
|
1121
|
+
p0_ticket_ids = [u.ticket_id for u in p0_updates]
|
|
1122
|
+
|
|
1123
|
+
# P0 safety checks with structured error
|
|
1124
|
+
if p0_count > 0:
|
|
1125
|
+
if not request.allow_p0:
|
|
1126
|
+
return JSONResponse(
|
|
1127
|
+
status_code=400,
|
|
1128
|
+
content={
|
|
1129
|
+
"detail": "P0 assignments require allow_p0=true flag.",
|
|
1130
|
+
"error_type": "p0_flag_required",
|
|
1131
|
+
"p0_count": p0_count,
|
|
1132
|
+
"p0_ticket_ids": p0_ticket_ids,
|
|
1133
|
+
"max_p0_per_request": MAX_P0_PER_REQUEST,
|
|
1134
|
+
"resolution": "Add allow_p0: true to your request body to confirm P0 assignments.",
|
|
1135
|
+
},
|
|
1136
|
+
)
|
|
1137
|
+
if p0_count > MAX_P0_PER_REQUEST:
|
|
1138
|
+
return JSONResponse(
|
|
1139
|
+
status_code=400,
|
|
1140
|
+
content={
|
|
1141
|
+
"detail": f"Max {MAX_P0_PER_REQUEST} P0 assignments per request.",
|
|
1142
|
+
"error_type": "p0_limit_exceeded",
|
|
1143
|
+
"p0_count": p0_count,
|
|
1144
|
+
"p0_ticket_ids": p0_ticket_ids,
|
|
1145
|
+
"max_p0_per_request": MAX_P0_PER_REQUEST,
|
|
1146
|
+
"resolution": f"Split into multiple requests with at most {MAX_P0_PER_REQUEST} P0 assignments each.",
|
|
1147
|
+
},
|
|
1148
|
+
)
|
|
1149
|
+
|
|
1150
|
+
# Track before/after for audit logging
|
|
1151
|
+
changes_log = []
|
|
1152
|
+
|
|
1153
|
+
for update in request.updates:
|
|
1154
|
+
# AUTHORIZATION: Verify ticket belongs to board AND goal
|
|
1155
|
+
try:
|
|
1156
|
+
ticket = await board_service.verify_ticket_in_board(
|
|
1157
|
+
update.ticket_id, request.board_id
|
|
1158
|
+
)
|
|
1159
|
+
except ValueError:
|
|
1160
|
+
results.append(
|
|
1161
|
+
BulkPriorityUpdateResult(
|
|
1162
|
+
ticket_id=update.ticket_id,
|
|
1163
|
+
success=False,
|
|
1164
|
+
error="Ticket not found or does not belong to board",
|
|
1165
|
+
)
|
|
1166
|
+
)
|
|
1167
|
+
failed_count += 1
|
|
1168
|
+
continue
|
|
1169
|
+
|
|
1170
|
+
# Security check: verify ticket belongs to the specified goal
|
|
1171
|
+
if ticket.goal_id != request.goal_id:
|
|
1172
|
+
results.append(
|
|
1173
|
+
BulkPriorityUpdateResult(
|
|
1174
|
+
ticket_id=update.ticket_id,
|
|
1175
|
+
success=False,
|
|
1176
|
+
error="Ticket does not belong to specified goal",
|
|
1177
|
+
)
|
|
1178
|
+
)
|
|
1179
|
+
failed_count += 1
|
|
1180
|
+
continue
|
|
1181
|
+
|
|
1182
|
+
# Record before state for audit
|
|
1183
|
+
old_priority = ticket.priority
|
|
1184
|
+
old_bucket = (
|
|
1185
|
+
priority_to_bucket(old_priority) if old_priority else PriorityBucket.P2
|
|
1186
|
+
)
|
|
1187
|
+
|
|
1188
|
+
# Update priority
|
|
1189
|
+
new_priority = bucket_to_priority(update.priority_bucket)
|
|
1190
|
+
ticket.priority = new_priority
|
|
1191
|
+
|
|
1192
|
+
changes_log.append(
|
|
1193
|
+
{
|
|
1194
|
+
"ticket_id": ticket.id,
|
|
1195
|
+
"ticket_title": ticket.title,
|
|
1196
|
+
"old_bucket": old_bucket.value,
|
|
1197
|
+
"new_bucket": update.priority_bucket.value,
|
|
1198
|
+
"old_priority": old_priority,
|
|
1199
|
+
"new_priority": new_priority,
|
|
1200
|
+
}
|
|
1201
|
+
)
|
|
1202
|
+
|
|
1203
|
+
results.append(
|
|
1204
|
+
BulkPriorityUpdateResult(
|
|
1205
|
+
ticket_id=update.ticket_id,
|
|
1206
|
+
success=True,
|
|
1207
|
+
new_priority=new_priority,
|
|
1208
|
+
new_bucket=update.priority_bucket,
|
|
1209
|
+
)
|
|
1210
|
+
)
|
|
1211
|
+
updated_count += 1
|
|
1212
|
+
|
|
1213
|
+
# Create audit event for all changes
|
|
1214
|
+
if updated_count > 0 and changes_log:
|
|
1215
|
+
# Count direction of changes
|
|
1216
|
+
up_count = sum(
|
|
1217
|
+
1 for c in changes_log if c["new_priority"] > (c["old_priority"] or 0)
|
|
1218
|
+
)
|
|
1219
|
+
down_count = sum(
|
|
1220
|
+
1 for c in changes_log if c["new_priority"] < (c["old_priority"] or 0)
|
|
1221
|
+
)
|
|
1222
|
+
to_p0_count = sum(
|
|
1223
|
+
1
|
|
1224
|
+
for c in changes_log
|
|
1225
|
+
if c["new_bucket"] == "P0" and c["old_bucket"] != "P0"
|
|
1226
|
+
)
|
|
1227
|
+
|
|
1228
|
+
# Log the bulk update event (one per goal, includes all ticket changes)
|
|
1229
|
+
# Get first ticket's ID for the event
|
|
1230
|
+
first_ticket_id = changes_log[0]["ticket_id"]
|
|
1231
|
+
|
|
1232
|
+
event = TicketEvent(
|
|
1233
|
+
ticket_id=first_ticket_id,
|
|
1234
|
+
event_type="priority_bulk_updated",
|
|
1235
|
+
from_state=None,
|
|
1236
|
+
to_state=None,
|
|
1237
|
+
actor_type=ActorType.HUMAN.value,
|
|
1238
|
+
actor_id="bulk_priority_update",
|
|
1239
|
+
reason=f"Bulk priority update: {updated_count} tickets ({up_count} up, {down_count} down, {to_p0_count} to P0)",
|
|
1240
|
+
payload_json=json.dumps(
|
|
1241
|
+
{
|
|
1242
|
+
"goal_id": request.goal_id,
|
|
1243
|
+
"total_updated": updated_count,
|
|
1244
|
+
"up_count": up_count,
|
|
1245
|
+
"down_count": down_count,
|
|
1246
|
+
"to_p0_count": to_p0_count,
|
|
1247
|
+
"allow_p0": request.allow_p0,
|
|
1248
|
+
"changes": changes_log,
|
|
1249
|
+
}
|
|
1250
|
+
),
|
|
1251
|
+
)
|
|
1252
|
+
db.add(event)
|
|
1253
|
+
|
|
1254
|
+
await db.commit()
|
|
1255
|
+
|
|
1256
|
+
return BulkPriorityUpdateResponse(
|
|
1257
|
+
updated=results,
|
|
1258
|
+
updated_count=updated_count,
|
|
1259
|
+
failed_count=failed_count,
|
|
1260
|
+
)
|
|
1261
|
+
|
|
1262
|
+
|
|
1263
|
+
# ==================== Queued Message Endpoints ====================
|
|
1264
|
+
# Like vibe-kanban, allows queuing the next prompt while execution is in progress
|
|
1265
|
+
|
|
1266
|
+
|
|
1267
|
+
from pydantic import BaseModel, Field
|
|
1268
|
+
|
|
1269
|
+
from app.services.queued_message_service import queued_message_service
|
|
1270
|
+
|
|
1271
|
+
|
|
1272
|
+
class QueueMessageRequest(BaseModel):
|
|
1273
|
+
"""Request to queue a follow-up message."""
|
|
1274
|
+
|
|
1275
|
+
message: str = Field(..., description="The follow-up prompt to execute next")
|
|
1276
|
+
|
|
1277
|
+
|
|
1278
|
+
class QueueStatusResponse(BaseModel):
|
|
1279
|
+
"""Response showing queue status for a ticket."""
|
|
1280
|
+
|
|
1281
|
+
status: str = Field(..., description="Queue status: 'empty' or 'queued'")
|
|
1282
|
+
message: str | None = Field(None, description="The queued message (if any)")
|
|
1283
|
+
queued_at: str | None = Field(None, description="When the message was queued")
|
|
1284
|
+
|
|
1285
|
+
|
|
1286
|
+
@router.post(
    "/{ticket_id}/queue",
    response_model=QueueStatusResponse,
    summary="Queue a follow-up message for a ticket",
)
async def queue_message(
    ticket_id: str,
    data: QueueMessageRequest,
    db: AsyncSession = Depends(get_db),
) -> QueueStatusResponse:
    """Queue a follow-up message to be executed after the current job finishes.

    This enables a faster iteration loop for individual developers:
    - While the agent is working on one task, you can type the next instruction
    - When the current execution completes, the queued message auto-executes
    - Only one message can be queued at a time (new message replaces old)

    Similar to vibe-kanban's queued message feature.
    """
    # Verify the ticket exists before touching the queue.
    await TicketService(db).get_ticket_by_id(ticket_id)

    entry = queued_message_service.queue_message(ticket_id, data.message)

    return QueueStatusResponse(
        status="queued",
        message=entry.message,
        queued_at=entry.queued_at.isoformat(),
    )
|
1317
|
+
|
|
1318
|
+
@router.get(
    "/{ticket_id}/queue",
    response_model=QueueStatusResponse,
    summary="Get queue status for a ticket",
)
async def get_queue_status(
    ticket_id: str,
    db: AsyncSession = Depends(get_db),
) -> QueueStatusResponse:
    """Get the current queue status for a ticket.

    Returns the queued message if one exists, or empty status.
    """
    # Verify the ticket exists before touching the queue.
    await TicketService(db).get_ticket_by_id(ticket_id)

    pending = queued_message_service.get_queued(ticket_id)
    if not pending:
        # Nothing queued for this ticket.
        return QueueStatusResponse(status="empty", message=None, queued_at=None)

    return QueueStatusResponse(
        status="queued",
        message=pending.message,
        queued_at=pending.queued_at.isoformat(),
    )
|
1346
|
+
|
|
1347
|
+
@router.delete(
    "/{ticket_id}/queue",
    response_model=QueueStatusResponse,
    summary="Cancel a queued message for a ticket",
)
async def cancel_queued_message(
    ticket_id: str,
    db: AsyncSession = Depends(get_db),
) -> QueueStatusResponse:
    """Cancel/remove a queued message for a ticket.

    Returns empty status after cancellation.
    """
    # Verify the ticket exists before touching the queue.
    await TicketService(db).get_ticket_by_id(ticket_id)

    queued_message_service.cancel_queued(ticket_id)

    # Always report an empty queue after cancellation.
    return QueueStatusResponse(status="empty", message=None, queued_at=None)
|
1368
|
+
|
|
1369
|
+
# ==================== Agent Activity Logs ====================
|
|
1370
|
+
# Aggregated view of all agent execution logs for a ticket
|
|
1371
|
+
|
|
1372
|
+
|
|
1373
|
+
import re
|
|
1374
|
+
import uuid as uuid_module
|
|
1375
|
+
from pathlib import Path
|
|
1376
|
+
|
|
1377
|
+
from app.models.evidence import EvidenceKind
|
|
1378
|
+
from app.models.job import Job
|
|
1379
|
+
from app.models.normalized_log import NormalizedLogEntry
|
|
1380
|
+
|
|
1381
|
+
|
|
1382
|
+
class AgentLogEntry(BaseModel):
    """A single normalized log entry from agent execution."""

    id: str  # unique entry id (uuid4 string, or DB row id for normalized-log fallback)
    job_id: str  # job whose output produced this entry
    sequence: int  # ordering of the entry within the job's output
    timestamp: str  # ISO-8601 timestamp string
    entry_type: str  # e.g. "thinking", "assistant_message", "tool_call", "todo_list", "system_message", "error"
    content: str  # display text of the entry
    metadata: dict = Field(default_factory=dict)  # type-specific extras (tool name/status, parsed todos, ...)
    collapsed: bool = False  # render hint: start collapsed (used for thinking blocks)
    highlight: bool = False  # render hint: emphasize (used for error entries)
1395
|
+
|
|
1396
|
+
class JobExecutionSummary(BaseModel):
    """Summary of a job's execution for display."""

    job_id: str
    job_kind: str  # mirrors Job.kind
    job_status: str  # mirrors Job.status
    started_at: str | None = None  # ISO-8601; None if the job never started
    finished_at: str | None = None  # ISO-8601; None while still running
    duration_seconds: float | None = None  # finished_at - started_at when both are known
    entry_count: int = 0  # len(entries); populated even when entries are omitted
    entries: list[AgentLogEntry] = Field(default_factory=list)  # empty when include_entries=False
1408
|
+
|
|
1409
|
+
class TicketAgentLogsResponse(BaseModel):
    """Response containing all agent execution logs for a ticket."""

    ticket_id: str
    ticket_title: str
    total_entries: int  # sum of entry counts across all executions
    total_jobs: int  # number of jobs found for the ticket
    executions: list[JobExecutionSummary] = Field(default_factory=list)  # newest job first
|
1418
|
+
|
|
1419
|
+
def parse_agent_output(
    content: str, job_id: str, timestamp: str
) -> list[AgentLogEntry]:
    """Parse agent stdout content into structured log entries.

    Supports two formats:
    1. cursor-agent JSON streaming (lines starting with {"type":...)
    2. Claude-style output with <thinking> blocks

    Extracts thinking blocks, todo lists, and the main assistant message.
    """
    if not content or not content.strip():
        return []

    # cursor-agent streams one JSON object per line; detect it by the first line.
    first_line = content.strip().split("\n", 1)[0].strip()
    if first_line.startswith('{"type":'):
        return parse_cursor_json_output(content, job_id, timestamp)

    entries: list[AgentLogEntry] = []

    def _emit(kind: str, text: str, meta: dict, *, collapsed: bool = False) -> None:
        # Sequence numbers are simply the emission order.
        entries.append(
            AgentLogEntry(
                id=str(uuid_module.uuid4()),
                job_id=job_id,
                sequence=len(entries),
                timestamp=timestamp,
                entry_type=kind,
                content=text,
                metadata=meta,
                collapsed=collapsed,
                highlight=False,
            )
        )

    # Claude-style <thinking> blocks become individual collapsed entries.
    thinking_re = re.compile(r"<thinking>(.*?)</thinking>", re.DOTALL)
    for block in thinking_re.findall(content):
        text = block.strip()
        if text:
            _emit("thinking", text, {"collapsed": True}, collapsed=True)

    # Strip thinking blocks before looking for todos / the main message.
    remainder = thinking_re.sub("", content)

    # Checkbox-style todo lists ("- [ ] task" / "- [x] task"); only the first
    # run of checkbox lines is extracted, matching the original behavior.
    todo_re = re.compile(
        r"(?:^|\n)(?:[-*]\s*\[[ xX✓✗]\].*?(?:\n|$))+", re.MULTILINE
    )
    hit = todo_re.search(remainder)
    if hit:
        todos_text = hit.group(0).strip()
        _emit("todo_list", todos_text, {"todos": parse_todos_from_text(todos_text)})

    # Whatever is left is treated as the assistant's main message.
    body = remainder.strip()
    if body:
        _emit("assistant_message", body, {})

    return entries
|
|
1519
|
+
|
|
1520
|
+
def parse_cursor_json_output(
    content: str, job_id: str, timestamp: str
) -> list[AgentLogEntry]:
    """
    Parse cursor-agent JSON streaming output into structured log entries.

    Handles JSON lines with types: system, user, assistant, thinking, tool_call, result
    """
    import json

    entries: list[AgentLogEntry] = []
    seq = 0

    # Coalescing state for streaming messages: assistant/thinking deltas are
    # accumulated and flushed as single entries.
    current_thinking = ""
    current_assistant = ""

    for line in content.strip().split("\n"):
        line = line.strip()
        if not line:
            continue

        try:
            data = json.loads(line)
        except json.JSONDecodeError:
            # Non-JSON line - skip or treat as system message
            # (lines starting with "{" that fail to parse are dropped silently,
            # presumably truncated JSON — TODO confirm)
            if line and not line.startswith("{"):
                entries.append(
                    AgentLogEntry(
                        id=str(uuid_module.uuid4()),
                        job_id=job_id,
                        sequence=seq,
                        timestamp=timestamp,
                        entry_type="system_message",
                        content=line,
                        metadata={},
                        collapsed=False,
                        highlight=False,
                    )
                )
                seq += 1
            continue

        msg_type = data.get("type", "")

        if msg_type == "system":
            # Surface the model name, if present, as a system message.
            model = data.get("model")
            if model:
                entries.append(
                    AgentLogEntry(
                        id=str(uuid_module.uuid4()),
                        job_id=job_id,
                        sequence=seq,
                        timestamp=timestamp,
                        entry_type="system_message",
                        content=f"🤖 Model: {model}",
                        metadata={"model": model},
                        collapsed=False,
                        highlight=False,
                    )
                )
                seq += 1

        elif msg_type == "thinking":
            subtype = data.get("subtype", "")
            if subtype == "delta":
                # Accumulate streamed thinking text until "completed".
                text = data.get("text", "")
                current_thinking += text
            elif subtype == "completed":
                if current_thinking:
                    entries.append(
                        AgentLogEntry(
                            id=str(uuid_module.uuid4()),
                            job_id=job_id,
                            sequence=seq,
                            timestamp=timestamp,
                            entry_type="thinking",
                            content=current_thinking,
                            metadata={"collapsed": True},
                            collapsed=True,
                            highlight=False,
                        )
                    )
                    seq += 1
                    current_thinking = ""

        elif msg_type == "assistant":
            # Concatenate all text parts of the assistant message payload.
            message = data.get("message", {})
            content_parts = message.get("content", [])
            text = ""
            for part in content_parts:
                if isinstance(part, dict) and part.get("type") == "text":
                    text += part.get("text", "")
                elif isinstance(part, str):
                    text += part

            if text:
                # Coalesced across lines; flushed once after the loop, so all
                # assistant text ends up in a single entry.
                current_assistant += text

        elif msg_type == "tool_call":
            subtype = data.get("subtype", "")
            tool_call = data.get("tool_call", {})

            # Parse tool type and content
            tool_name, content_text = _parse_cursor_tool_call(tool_call)

            if subtype == "started":
                entries.append(
                    AgentLogEntry(
                        id=str(uuid_module.uuid4()),
                        job_id=job_id,
                        sequence=seq,
                        timestamp=timestamp,
                        entry_type="tool_call",
                        content=content_text,
                        metadata={"tool_name": tool_name, "status": "started"},
                        collapsed=False,
                        highlight=False,
                    )
                )
                seq += 1
            elif subtype == "completed":
                # Append the tool result summary (exit code, line counts, ...)
                # to the display content when one is available.
                result_text = _extract_cursor_tool_result(tool_call)
                entries.append(
                    AgentLogEntry(
                        id=str(uuid_module.uuid4()),
                        job_id=job_id,
                        sequence=seq,
                        timestamp=timestamp,
                        entry_type="tool_call",
                        content=f"{content_text}\n→ {result_text}"
                        if result_text
                        else content_text,
                        metadata={"tool_name": tool_name, "status": "completed"},
                        collapsed=False,
                        highlight=False,
                    )
                )
                seq += 1

    # Flush any remaining assistant content
    if current_assistant:
        entries.append(
            AgentLogEntry(
                id=str(uuid_module.uuid4()),
                job_id=job_id,
                sequence=seq,
                timestamp=timestamp,
                entry_type="assistant_message",
                content=current_assistant,
                metadata={},
                collapsed=False,
                highlight=False,
            )
        )
        seq += 1

    # Flush any remaining thinking content
    if current_thinking:
        entries.append(
            AgentLogEntry(
                id=str(uuid_module.uuid4()),
                job_id=job_id,
                sequence=seq,
                timestamp=timestamp,
                entry_type="thinking",
                content=current_thinking,
                metadata={"collapsed": True},
                collapsed=True,
                highlight=False,
            )
        )
        seq += 1

    return entries
|
|
1696
|
+
|
|
1697
|
+
def _strip_worktree_prefix(path: str) -> str:
|
|
1698
|
+
"""Strip worktree path prefixes for cleaner display.
|
|
1699
|
+
|
|
1700
|
+
Handles both central (~/.draft/worktrees/) and legacy (.draft/worktrees/) paths.
|
|
1701
|
+
"""
|
|
1702
|
+
import re
|
|
1703
|
+
|
|
1704
|
+
# Match central data dir pattern: .../.draft/worktrees/{board_id}/{ticket_id}/...
|
|
1705
|
+
m = re.search(r"\.draft/worktrees/[^/]+/[^/]+/(.+)", path)
|
|
1706
|
+
if m:
|
|
1707
|
+
return m.group(1)
|
|
1708
|
+
# Match legacy pattern: .../.draft/worktrees/{ticket_id}/...
|
|
1709
|
+
m = re.search(r"\.draft/worktrees/[^/]+/(.+)", path)
|
|
1710
|
+
if m:
|
|
1711
|
+
return m.group(1)
|
|
1712
|
+
return path
|
|
1713
|
+
|
|
1714
|
+
|
|
1715
|
+
def _parse_cursor_tool_call(tool_call: dict) -> tuple[str, str]:
|
|
1716
|
+
"""Parse cursor-agent tool call to extract name and display content."""
|
|
1717
|
+
if "readToolCall" in tool_call:
|
|
1718
|
+
args = tool_call["readToolCall"].get("args", {})
|
|
1719
|
+
path = args.get("path", "unknown")
|
|
1720
|
+
# Strip common worktree prefixes for cleaner display
|
|
1721
|
+
path = _strip_worktree_prefix(path)
|
|
1722
|
+
return "read_file", f"📖 Read: {path}"
|
|
1723
|
+
|
|
1724
|
+
if "editToolCall" in tool_call:
|
|
1725
|
+
args = tool_call["editToolCall"].get("args", {})
|
|
1726
|
+
path = args.get("path", "unknown")
|
|
1727
|
+
path = _strip_worktree_prefix(path)
|
|
1728
|
+
return "edit_file", f"✏️ Edit: {path}"
|
|
1729
|
+
|
|
1730
|
+
if "lsToolCall" in tool_call:
|
|
1731
|
+
args = tool_call["lsToolCall"].get("args", {})
|
|
1732
|
+
path = args.get("path", ".")
|
|
1733
|
+
return "list_dir", f"📁 List: {path}"
|
|
1734
|
+
|
|
1735
|
+
if "globToolCall" in tool_call:
|
|
1736
|
+
args = tool_call["globToolCall"].get("args", {})
|
|
1737
|
+
pattern = args.get("globPattern", "*")
|
|
1738
|
+
return "glob", f"🔍 Glob: {pattern}"
|
|
1739
|
+
|
|
1740
|
+
if "grepToolCall" in tool_call:
|
|
1741
|
+
args = tool_call["grepToolCall"].get("args", {})
|
|
1742
|
+
pattern = args.get("pattern", "")
|
|
1743
|
+
return "grep", f"🔍 Grep: {pattern}"
|
|
1744
|
+
|
|
1745
|
+
if "shellToolCall" in tool_call:
|
|
1746
|
+
args = tool_call["shellToolCall"].get("args", {})
|
|
1747
|
+
command = args.get("command", "")
|
|
1748
|
+
return "shell", f"💻 Shell: {command}"
|
|
1749
|
+
|
|
1750
|
+
return "unknown", "🔧 Tool call"
|
|
1751
|
+
|
|
1752
|
+
|
|
1753
|
+
def _extract_cursor_tool_result(tool_call: dict) -> str:
|
|
1754
|
+
"""Extract a summary of cursor-agent tool result."""
|
|
1755
|
+
for key in [
|
|
1756
|
+
"readToolCall",
|
|
1757
|
+
"editToolCall",
|
|
1758
|
+
"lsToolCall",
|
|
1759
|
+
"globToolCall",
|
|
1760
|
+
"grepToolCall",
|
|
1761
|
+
"shellToolCall",
|
|
1762
|
+
]:
|
|
1763
|
+
if key in tool_call:
|
|
1764
|
+
result = tool_call[key].get("result", {})
|
|
1765
|
+
if "success" in result:
|
|
1766
|
+
success = result["success"]
|
|
1767
|
+
if key == "editToolCall":
|
|
1768
|
+
lines_added = success.get("linesAdded", 0)
|
|
1769
|
+
lines_removed = success.get("linesRemoved", 0)
|
|
1770
|
+
return f"+{lines_added} -{lines_removed} lines"
|
|
1771
|
+
elif key == "shellToolCall":
|
|
1772
|
+
exit_code = success.get("exitCode", 0)
|
|
1773
|
+
return f"exit code: {exit_code}"
|
|
1774
|
+
elif key == "globToolCall":
|
|
1775
|
+
total = success.get("totalFiles", 0)
|
|
1776
|
+
return f"{total} files"
|
|
1777
|
+
elif key == "readToolCall":
|
|
1778
|
+
total_lines = success.get("totalLines", 0)
|
|
1779
|
+
return f"{total_lines} lines" if total_lines else "read"
|
|
1780
|
+
elif "error" in result:
|
|
1781
|
+
return f"❌ {str(result['error'])[:50]}"
|
|
1782
|
+
return ""
|
|
1783
|
+
|
|
1784
|
+
|
|
1785
|
+
def parse_todos_from_text(text: str) -> list[dict]:
    """Parse todo items from text into structured format."""
    # Matches "- [ ] task", "- [x] task", "* [✓] task", etc.
    item_re = re.compile(r"^[-*]\s*\[([xX✓ ])\]\s*(.+)$")

    todos: list[dict] = []
    for raw_line in text.split("\n"):
        stripped = raw_line.strip()
        if not stripped:
            continue
        m = item_re.match(stripped)
        if m is None:
            continue
        todos.append(
            {
                "content": m.group(2).strip(),
                # "x"/"X"/"✓" all mean completed.
                "completed": m.group(1).lower() in ("x", "✓"),
            }
        )

    return todos
|
|
1809
|
+
|
|
1810
|
+
@router.get(
    "/{ticket_id}/agent-logs",
    response_model=TicketAgentLogsResponse,
    summary="Get all agent execution logs for a ticket",
)
async def get_ticket_agent_logs(
    ticket_id: str,
    include_entries: bool = True,
    db: AsyncSession = Depends(get_db),
) -> TicketAgentLogsResponse:
    """
    Get all agent execution logs for a ticket across all jobs.

    This provides a complete view of the agent's chain of thought, tool calls,
    file edits, and other actions taken during ticket execution.

    Like vibe-kanban's execution_process_logs, this allows users to:
    - Review the agent's reasoning process
    - See what tools were used and why
    - Debug issues with ticket execution
    - Understand how the agent approached the task

    Reads from Evidence stdout files (actual agent output) rather than
    orchestrator logs.

    Args:
        ticket_id: The ticket ID
        include_entries: If True (default), include full log entries.
            If False, only return summary info.

    Returns:
        All agent conversation/output grouped by job execution.
    """
    import os

    from sqlalchemy.orm import selectinload

    from app.models.board import Board

    # Verify ticket exists and get title
    service = TicketService(db)
    ticket = await service.get_ticket_by_id(ticket_id)

    # Get repo root from the ticket's board (authoritative source)
    repo_root = None
    if ticket.board_id:
        board_result = await db.execute(
            select(Board).where(Board.id == ticket.board_id)
        )
        board = board_result.scalar_one_or_none()
        if board and board.repo_root:
            repo_root = Path(board.repo_root)

    # Fallback to environment or cwd
    if repo_root is None or not repo_root.exists():
        git_repo_path = os.environ.get("GIT_REPO_PATH")
        if git_repo_path:
            repo_root = Path(git_repo_path)
        else:
            repo_root = Path.cwd()

    # Get all jobs for this ticket with their evidence
    # (newest first; selectinload avoids per-job evidence queries)
    result = await db.execute(
        select(Job)
        .where(Job.ticket_id == ticket_id)
        .options(selectinload(Job.evidence))
        .order_by(Job.created_at.desc())
    )
    jobs = list(result.scalars().all())

    executions: list[JobExecutionSummary] = []
    total_entries = 0

    for job in jobs:
        # Calculate duration if job is finished
        duration = None
        if job.started_at and job.finished_at:
            duration = (job.finished_at - job.started_at).total_seconds()

        # Timestamp applied to every parsed entry for this job.
        timestamp = (
            job.started_at.isoformat() if job.started_at else job.created_at.isoformat()
        )

        # Build entries from Evidence stdout files
        entries: list[AgentLogEntry] = []

        if include_entries:
            # Get executor evidence (the actual agent output)
            executor_evidence = [
                ev
                for ev in job.evidence
                if ev.kind == EvidenceKind.EXECUTOR_STDOUT.value
            ]

            for ev in executor_evidence:
                if ev.stdout_path:
                    try:
                        # Resolve the stdout path - try multiple locations
                        stdout_path = repo_root / ev.stdout_path

                        # If not found at repo root, try relative to cwd
                        if not stdout_path.exists():
                            stdout_path = Path.cwd() / ev.stdout_path

                        # If still not found, try absolute path
                        if not stdout_path.exists() and ev.stdout_path.startswith("/"):
                            stdout_path = Path(ev.stdout_path)

                        if stdout_path.exists():
                            content = stdout_path.read_text()
                            if content.strip():
                                # Parse the agent output into structured entries
                                parsed = parse_agent_output(content, job.id, timestamp)
                                entries.extend(parsed)
                        else:
                            # File not found - add info entry
                            entries.append(
                                AgentLogEntry(
                                    id=str(uuid_module.uuid4()),
                                    job_id=job.id,
                                    sequence=0,
                                    timestamp=timestamp,
                                    entry_type="system_message",
                                    content=f"Agent output file not found: {ev.stdout_path}",
                                    metadata={"repo_root": str(repo_root)},
                                    collapsed=False,
                                    highlight=False,
                                )
                            )
                    except Exception as e:
                        # If we can't read the file, add an error entry
                        # (best-effort: one bad file must not fail the response)
                        entries.append(
                            AgentLogEntry(
                                id=str(uuid_module.uuid4()),
                                job_id=job.id,
                                sequence=0,
                                timestamp=timestamp,
                                entry_type="error",
                                content=f"Could not read agent output: {str(e)}",
                                metadata={},
                                collapsed=False,
                                highlight=True,
                            )
                        )

            # If no executor evidence, try to get from normalized logs as fallback
            if not entries:
                # Fallback to normalized_logs table
                logs_result = await db.execute(
                    select(NormalizedLogEntry)
                    .where(NormalizedLogEntry.job_id == job.id)
                    .order_by(NormalizedLogEntry.sequence)
                )
                logs = list(logs_result.scalars().all())

                for log in logs:
                    entries.append(
                        AgentLogEntry(
                            id=log.id,
                            job_id=log.job_id,
                            sequence=log.sequence,
                            timestamp=log.timestamp.isoformat()
                            if log.timestamp
                            else "",
                            entry_type=log.entry_type.value if log.entry_type else "",
                            content=log.content,
                            metadata=log.entry_metadata or {},
                            collapsed=log.collapsed or False,
                            highlight=log.highlight or False,
                        )
                    )

        total_entries += len(entries)

        executions.append(
            JobExecutionSummary(
                job_id=job.id,
                job_kind=job.kind,
                job_status=job.status,
                started_at=job.started_at.isoformat() if job.started_at else None,
                finished_at=job.finished_at.isoformat() if job.finished_at else None,
                duration_seconds=duration,
                entry_count=len(entries),
                entries=entries,
            )
        )

    return TicketAgentLogsResponse(
        ticket_id=ticket_id,
        ticket_title=ticket.title,
        total_entries=total_entries,
        total_jobs=len(jobs),
        executions=executions,
    )