mcp-ticketer 0.2.0__py3-none-any.whl → 2.2.9__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +930 -52
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1537 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +58 -16
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/__init__.py +1 -1
- mcp_ticketer/adapters/linear/adapter.py +3810 -462
- mcp_ticketer/adapters/linear/client.py +312 -69
- mcp_ticketer/adapters/linear/mappers.py +305 -85
- mcp_ticketer/adapters/linear/queries.py +317 -17
- mcp_ticketer/adapters/linear/types.py +187 -64
- mcp_ticketer/adapters/linear.py +2 -2
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +421 -0
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +1323 -151
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +209 -114
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +256 -130
- mcp_ticketer/cli/main.py +140 -1284
- mcp_ticketer/cli/mcp_configure.py +1013 -100
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +794 -0
- mcp_ticketer/cli/simple_health.py +84 -59
- mcp_ticketer/cli/ticket_commands.py +1375 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +195 -72
- mcp_ticketer/core/__init__.py +64 -1
- mcp_ticketer/core/adapter.py +618 -18
- mcp_ticketer/core/config.py +77 -68
- mcp_ticketer/core/env_discovery.py +75 -16
- mcp_ticketer/core/env_loader.py +121 -97
- mcp_ticketer/core/exceptions.py +32 -24
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +566 -19
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +189 -49
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +69 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +224 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +330 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1564 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +150 -0
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +318 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1413 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +78 -63
- mcp_ticketer/queue/queue.py +108 -21
- mcp_ticketer/queue/run_worker.py +2 -2
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +96 -58
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.9.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.9.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.9.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1354
- mcp_ticketer/adapters/jira.py +0 -1011
- mcp_ticketer/mcp/server.py +0 -1895
- mcp_ticketer-0.2.0.dist-info/METADATA +0 -414
- mcp_ticketer-0.2.0.dist-info/RECORD +0 -58
- mcp_ticketer-0.2.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/licenses/LICENSE +0 -0
--- a/mcp_ticketer/adapters/aitrackdown.py
+++ b/mcp_ticketer/adapters/aitrackdown.py
@@ -1,25 +1,38 @@
 """AI-Trackdown adapter implementation."""

+from __future__ import annotations
+
 import builtins
 import json
+import logging
 from datetime import datetime
 from pathlib import Path
-from typing import Any
+from typing import Any

 from ..core.adapter import BaseAdapter
-from ..core.models import
+from ..core.models import (
+    Attachment,
+    Comment,
+    Epic,
+    Priority,
+    SearchQuery,
+    Task,
+    TicketState,
+)
 from ..core.registry import AdapterRegistry

+logger = logging.getLogger(__name__)
+
 # Import ai-trackdown-pytools when available
 try:
-    from ai_trackdown_pytools import AITrackdown
-    from ai_trackdown_pytools import Ticket as AITicket
+    from ai_trackdown_pytools import AITrackdown  # type: ignore[attr-defined]
+    from ai_trackdown_pytools import Ticket as AITicket  # type: ignore[attr-defined]

     HAS_AITRACKDOWN = True
 except ImportError:
     HAS_AITRACKDOWN = False
-    AITrackdown = None
-    AITicket = None
+    AITrackdown = None  # type: ignore[assignment]
+    AITicket = None  # type: ignore[assignment]


 class AITrackdownAdapter(BaseAdapter[Task]):
@@ -29,20 +42,24 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         """Initialize AI-Trackdown adapter.

         Args:
+        ----
             config: Configuration with 'base_path' for tickets directory

         """
         super().__init__(config)
         self.base_path = Path(config.get("base_path", ".aitrackdown"))
         self.tickets_dir = self.base_path / "tickets"
+        self._comment_counter = 0  # Counter for unique comment IDs

         # Initialize AI-Trackdown if available
+        # Always create tickets directory (needed for both modes)
+        self.tickets_dir.mkdir(parents=True, exist_ok=True)
+
         if HAS_AITRACKDOWN:
             self.tracker = AITrackdown(str(self.base_path))
         else:
             # Fallback to direct file operations
             self.tracker = None
-            self.tickets_dir.mkdir(parents=True, exist_ok=True)

     def validate_credentials(self) -> tuple[bool, str]:
         """Validate that required credentials are present.
@@ -50,6 +67,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         AITrackdown is file-based and doesn't require credentials.

         Returns:
+        -------
             (is_valid, error_message) - Always returns (True, "") for AITrackdown

         """
@@ -60,10 +78,15 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return True, ""

     def _get_state_mapping(self) -> dict[TicketState, str]:
-        """Map universal states to AI-Trackdown states.
+        """Map universal states to AI-Trackdown states.
+
+        Note: We use the exact enum values (snake_case) to match what
+        Pydantic's use_enum_values=True produces. This ensures consistency
+        between what's written to files and what's read back.
+        """
         return {
             TicketState.OPEN: "open",
-            TicketState.IN_PROGRESS: "
+            TicketState.IN_PROGRESS: "in_progress",  # snake_case, not kebab-case
             TicketState.READY: "ready",
             TicketState.TESTED: "tested",
             TicketState.DONE: "done",
@@ -72,7 +95,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             TicketState.CLOSED: "closed",
         }

-    def _priority_to_ai(self, priority:
+    def _priority_to_ai(self, priority: Priority | str) -> str:
         """Convert universal priority to AI-Trackdown priority."""
         if isinstance(priority, Priority):
             return priority.value
@@ -87,6 +110,18 @@ class AITrackdownAdapter(BaseAdapter[Task]):

     def _task_from_ai_ticket(self, ai_ticket: dict[str, Any]) -> Task:
         """Convert AI-Trackdown ticket to universal Task."""
+        # Get user metadata from ticket file
+        user_metadata = ai_ticket.get("metadata", {})
+
+        # Create adapter metadata
+        adapter_metadata = {
+            "ai_ticket_id": ai_ticket.get("id"),
+            "source": "aitrackdown",
+        }
+
+        # Merge user metadata with adapter metadata (user takes priority)
+        combined_metadata = {**adapter_metadata, **user_metadata}
+
         return Task(
             id=ai_ticket.get("id"),
             title=ai_ticket.get("title", ""),
@@ -97,6 +132,8 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             parent_issue=ai_ticket.get("parent_issue"),
             parent_epic=ai_ticket.get("parent_epic"),
             assignee=ai_ticket.get("assignee"),
+            estimated_hours=ai_ticket.get("estimated_hours"),
+            actual_hours=ai_ticket.get("actual_hours"),
             created_at=(
                 datetime.fromisoformat(ai_ticket["created_at"])
                 if "created_at" in ai_ticket
@@ -107,11 +144,23 @@ class AITrackdownAdapter(BaseAdapter[Task]):
                 if "updated_at" in ai_ticket
                 else None
             ),
-            metadata=
+            metadata=combined_metadata,  # Use merged metadata
         )

     def _epic_from_ai_ticket(self, ai_ticket: dict[str, Any]) -> Epic:
         """Convert AI-Trackdown ticket to universal Epic."""
+        # Get user metadata from ticket file
+        user_metadata = ai_ticket.get("metadata", {})
+
+        # Create adapter metadata
+        adapter_metadata = {
+            "ai_ticket_id": ai_ticket.get("id"),
+            "source": "aitrackdown",
+        }
+
+        # Merge user metadata with adapter metadata (user takes priority)
+        combined_metadata = {**adapter_metadata, **user_metadata}
+
         return Epic(
             id=ai_ticket.get("id"),
             title=ai_ticket.get("title", ""),
@@ -130,20 +179,22 @@ class AITrackdownAdapter(BaseAdapter[Task]):
                 if "updated_at" in ai_ticket and ai_ticket["updated_at"]
                 else None
             ),
-            metadata=
+            metadata=combined_metadata,  # Use merged metadata
         )

     def _task_to_ai_ticket(self, task: Task) -> dict[str, Any]:
         """Convert universal Task to AI-Trackdown ticket."""
         # Handle enum values that may be stored as strings due to use_enum_values=True
-
+        # Note: task.state is always a string due to ConfigDict(use_enum_values=True)
+        state_value: str
         if isinstance(task.state, TicketState):
             state_value = self._get_state_mapping()[task.state]
         elif isinstance(task.state, str):
-            # Already a string
-
-
-
+            # Already a string - keep as-is (don't convert to kebab-case)
+            # The state is already in snake_case format from the enum value
+            state_value = task.state
+        else:
+            state_value = str(task.state)

         return {
             "id": task.id,
@@ -157,20 +208,23 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             "assignee": task.assignee,
             "created_at": task.created_at.isoformat() if task.created_at else None,
             "updated_at": task.updated_at.isoformat() if task.updated_at else None,
+            "metadata": task.metadata or {},  # Serialize user metadata
             "type": "task",
         }

     def _epic_to_ai_ticket(self, epic: Epic) -> dict[str, Any]:
         """Convert universal Epic to AI-Trackdown ticket."""
         # Handle enum values that may be stored as strings due to use_enum_values=True
-
+        # Note: epic.state is always a string due to ConfigDict(use_enum_values=True)
+        state_value: str
         if isinstance(epic.state, TicketState):
             state_value = self._get_state_mapping()[epic.state]
         elif isinstance(epic.state, str):
-            # Already a string
-
-
-
+            # Already a string - keep as-is (don't convert to kebab-case)
+            # The state is already in snake_case format from the enum value
+            state_value = epic.state
+        else:
+            state_value = str(epic.state)

         return {
             "id": epic.id,
@@ -182,10 +236,11 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             "child_issues": epic.child_issues,
             "created_at": epic.created_at.isoformat() if epic.created_at else None,
             "updated_at": epic.updated_at.isoformat() if epic.updated_at else None,
+            "metadata": epic.metadata or {},  # Serialize user metadata
             "type": "epic",
         }

-    def _read_ticket_file(self, ticket_id: str) ->
+    def _read_ticket_file(self, ticket_id: str) -> dict[str, Any] | None:
         """Read ticket from file system."""
         ticket_file = self.tickets_dir / f"{ticket_id}.json"
         if ticket_file.exists():
@@ -199,7 +254,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         with open(ticket_file, "w") as f:
             json.dump(data, f, indent=2, default=str)

-    async def create(self, ticket:
+    async def create(self, ticket: Task | Epic) -> Task | Epic:
         """Create a new task."""
         # Generate ID if not provided
         if not ticket.id:
@@ -235,65 +290,74 @@ class AITrackdownAdapter(BaseAdapter[Task]):

         return ticket

-    async def create_epic(
+    async def create_epic(
+        self, title: str, description: str = None, **kwargs: Any
+    ) -> Epic:
         """Create a new epic.

         Args:
+        ----
             title: Epic title
             description: Epic description
             **kwargs: Additional epic properties

         Returns:
+        -------
             Created Epic instance
+
         """
-        epic = Epic(
-            title=title,
-            description=description,
-            **kwargs
-        )
+        epic = Epic(title=title, description=description, **kwargs)
         return await self.create(epic)

-    async def create_issue(
+    async def create_issue(
+        self,
+        title: str,
+        parent_epic: str = None,
+        description: str = None,
+        **kwargs: Any,
+    ) -> Task:
         """Create a new issue.

         Args:
+        ----
             title: Issue title
             parent_epic: Parent epic ID
             description: Issue description
             **kwargs: Additional issue properties

         Returns:
+        -------
             Created Task instance (representing an issue)
+
         """
         task = Task(
-            title=title,
-            description=description,
-            parent_epic=parent_epic,
-            **kwargs
+            title=title, description=description, parent_epic=parent_epic, **kwargs
         )
         return await self.create(task)

-    async def create_task(
+    async def create_task(
+        self, title: str, parent_id: str, description: str = None, **kwargs: Any
+    ) -> Task:
         """Create a new task under an issue.

         Args:
+        ----
             title: Task title
             parent_id: Parent issue ID
             description: Task description
             **kwargs: Additional task properties

         Returns:
+        -------
             Created Task instance
+
         """
         task = Task(
-            title=title,
-            description=description,
-            parent_issue=parent_id,
-            **kwargs
+            title=title, description=description, parent_issue=parent_id, **kwargs
         )
         return await self.create(task)

-    async def read(self, ticket_id: str) ->
+    async def read(self, ticket_id: str) -> Task | Epic | None:
         """Read a task by ID."""
         if self.tracker:
             ai_ticket = self.tracker.get_ticket(ticket_id)
@@ -309,9 +373,24 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return None

     async def update(
-        self, ticket_id: str, updates:
-    ) ->
-        """Update a task.
+        self, ticket_id: str, updates: dict[str, Any] | Task
+    ) -> Task | Epic | None:
+        """Update a task or epic.
+
+        Args:
+        ----
+            ticket_id: ID of ticket to update
+            updates: Dictionary of updates or Task object with new values
+
+        Returns:
+        -------
+            Updated Task or Epic, or None if ticket not found
+
+        Raises:
+        ------
+            AttributeError: If update fails due to invalid fields
+
+        """
         # Read existing ticket
         existing = await self.read(ticket_id)
         if not existing:
@@ -335,8 +414,12 @@ class AITrackdownAdapter(BaseAdapter[Task]):

         existing.updated_at = datetime.now()

-        # Write back
-
+        # Write back - use appropriate converter based on ticket type
+        if isinstance(existing, Epic):
+            ai_ticket = self._epic_to_ai_ticket(existing)
+        else:
+            ai_ticket = self._task_to_ai_ticket(existing)
+
         if self.tracker:
             self.tracker.update_ticket(ticket_id, **updates)
         else:
@@ -356,7 +439,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return False

     async def list(
-        self, limit: int = 10, offset: int = 0, filters:
+        self, limit: int = 10, offset: int = 0, filters: dict[str, Any] | None = None
     ) -> list[Task]:
         """List tasks with pagination."""
         tasks = []
@@ -440,7 +523,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):

     async def transition_state(
         self, ticket_id: str, target_state: TicketState
-    ) ->
+    ) -> Task | None:
         """Transition task to new state."""
         # Validate transition
         if not await self.validate_transition(ticket_id, target_state):
@@ -451,10 +534,11 @@ class AITrackdownAdapter(BaseAdapter[Task]):

     async def add_comment(self, comment: Comment) -> Comment:
         """Add comment to a task."""
-        # Generate ID
+        # Generate ID with counter to ensure uniqueness
         if not comment.id:
             timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
-
+            self._comment_counter += 1
+            comment.id = f"comment-{timestamp}-{self._comment_counter:04d}"

         comment.created_at = datetime.now()

@@ -475,14 +559,808 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         comments_dir = self.base_path / "comments"

         if comments_dir.exists():
+            # Get all comment files and filter by ticket_id first
             comment_files = sorted(comments_dir.glob("*.json"))
-            for comment_file in comment_files
+            for comment_file in comment_files:
                 with open(comment_file) as f:
                     data = json.load(f)
                     if data.get("ticket_id") == ticket_id:
                         comments.append(Comment(**data))

-
+        # Apply limit and offset AFTER filtering
+        return comments[offset : offset + limit]
+
+    async def get_epic(self, epic_id: str) -> Epic | None:
+        """Get epic by ID.
+
+        Args:
+        ----
+            epic_id: Epic ID to retrieve
+
+        Returns:
+        -------
+            Epic if found, None otherwise
+
+        """
+        ticket = await self.read(epic_id)
+        if ticket:
+            # Check if it's an Epic (can be Epic instance or have epic ticket_type)
+            if isinstance(ticket, Epic):
+                return ticket
+            # Check ticket_type (may be string or enum)
+            ticket_type_str = (
+                str(ticket.ticket_type).lower()
+                if hasattr(ticket, "ticket_type")
+                else None
+            )
+            if ticket_type_str and "epic" in ticket_type_str:
+                return Epic(**ticket.model_dump())
+        return None
+
+    async def list_epics(self, limit: int = 10, offset: int = 0) -> builtins.list[Epic]:
+        """List all epics.
+
+        Args:
+        ----
+            limit: Maximum number of epics to return
+            offset: Number of epics to skip
+
+        Returns:
+        -------
+            List of epics
+
+        """
+        all_tickets = await self.list(limit=100, offset=0, filters={"type": "epic"})
+        epics = []
+        for ticket in all_tickets:
+            if ticket.ticket_type == "epic":
+                epics.append(Epic(**ticket.model_dump()))
+        return epics[offset : offset + limit]
+
+    async def list_issues_by_epic(self, epic_id: str) -> builtins.list[Task]:
+        """List all issues belonging to an epic.
+
+        Args:
+        ----
+            epic_id: Epic ID to get issues for
+
+        Returns:
+        -------
+            List of issues (tasks with parent_epic set)
+
+        """
+        all_tickets = await self.list(limit=1000, offset=0, filters={})
+        issues = []
+        for ticket in all_tickets:
+            if hasattr(ticket, "parent_epic") and ticket.parent_epic == epic_id:
+                issues.append(ticket)
+        return issues
+
+    async def list_tasks_by_issue(self, issue_id: str) -> builtins.list[Task]:
+        """List all tasks belonging to an issue.
+
+        Args:
+        ----
+            issue_id: Issue ID (parent task) to get child tasks for
+
+        Returns:
+        -------
+            List of tasks
+
+        """
+        all_tickets = await self.list(limit=1000, offset=0, filters={})
+        tasks = []
+        for ticket in all_tickets:
+            # Check if this ticket has parent_issue matching the issue
+            if hasattr(ticket, "parent_issue") and ticket.parent_issue == issue_id:
+                tasks.append(ticket)
+        return tasks
+
+    def _sanitize_filename(self, filename: str) -> str:
+        """Sanitize filename to prevent security issues.
+
+        Args:
+        ----
+            filename: Original filename
+
+        Returns:
+        -------
+            Sanitized filename safe for filesystem
+
+        """
+        # Remove path separators and other dangerous characters
+        safe_chars = set(
+            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._- "
+        )
+        sanitized = "".join(c if c in safe_chars else "_" for c in filename)
+
+        # Ensure filename is not empty
+        if not sanitized.strip():
+            return "unnamed_file"
+
+        return sanitized.strip()
+
+    def _guess_content_type(self, file_path: Path) -> str:
+        """Guess MIME type from file extension.
+
+        Args:
+        ----
+            file_path: Path to file
+
+        Returns:
+        -------
+            MIME type string
+
+        """
+        import mimetypes
+
+        content_type, _ = mimetypes.guess_type(str(file_path))
+        return content_type or "application/octet-stream"
+
+    def _calculate_checksum(self, file_path: Path) -> str:
+        """Calculate SHA256 checksum of file.
+
+        Args:
+        ----
+            file_path: Path to file
+
+        Returns:
+        -------
+            Hexadecimal checksum string
+
+        """
+        import hashlib
+
+        sha256 = hashlib.sha256()
+        with open(file_path, "rb") as f:
+            # Read in chunks to handle large files
+            for chunk in iter(lambda: f.read(4096), b""):
+                sha256.update(chunk)
+
+        return sha256.hexdigest()
+
+    async def add_attachment(
+        self,
+        ticket_id: str,
+        file_path: str,
+        description: str | None = None,
+    ) -> Attachment:
+        """Attach a file to a ticket (local filesystem storage).
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+            file_path: Local file path to attach
+            description: Optional attachment description
+
+        Returns:
+        -------
+            Attachment metadata
+
+        Raises:
+        ------
+            ValueError: If ticket doesn't exist
+            FileNotFoundError: If file doesn't exist
+
+        """
+        import shutil
+
+        # Validate ticket exists
+        ticket = await self.read(ticket_id)
+        if not ticket:
+            raise ValueError(f"Ticket {ticket_id} not found")
+
+        # Validate file exists
+        source_path = Path(file_path).resolve()
+        if not source_path.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+
+        # Check file size (max 100MB for local storage)
+        size_mb = source_path.stat().st_size / (1024 * 1024)
+        if size_mb > 100:
+            raise ValueError(f"File too large: {size_mb:.2f}MB (max: 100MB)")
+
+        # Create attachments directory for this ticket
+        attachments_dir = self.base_path / "attachments" / ticket_id
+        attachments_dir.mkdir(parents=True, exist_ok=True)
+
+        # Generate unique filename with timestamp
+        timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
+        safe_filename = self._sanitize_filename(source_path.name)
+        attachment_id = f"{timestamp}-{safe_filename}"
+        dest_path = attachments_dir / attachment_id
+
+        # Copy file to attachments directory
+        shutil.copy2(source_path, dest_path)
+
+        # Create attachment metadata
+        attachment = Attachment(
+            id=attachment_id,
+            ticket_id=ticket_id,
+            filename=source_path.name,
+            url=f"file://{dest_path.absolute()}",
+            content_type=self._guess_content_type(source_path),
+            size_bytes=source_path.stat().st_size,
+            created_at=datetime.now(),
+            description=description,
+            metadata={
+                "original_path": str(source_path),
+                "storage_path": str(dest_path),
+                "checksum": self._calculate_checksum(dest_path),
+            },
+        )
+
+        # Save metadata to JSON file
+        metadata_file = attachments_dir / f"{attachment_id}.json"
+        with open(metadata_file, "w") as f:
+            # Convert to dict and handle datetime serialization
+            data = attachment.model_dump()
+            json.dump(data, f, indent=2, default=str)
+
+        return attachment
+
+    async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
+        """Get all attachments for a ticket with path traversal protection.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+
+        Returns:
+        -------
+            List of attachments (empty if none)
+
+        """
+        # Resolve and validate attachments directory
+        attachments_dir = (self.base_path / "attachments" / ticket_id).resolve()
+
+        # CRITICAL SECURITY CHECK: Ensure ticket directory is within base attachments
+        base_attachments = (self.base_path / "attachments").resolve()
+        if not str(attachments_dir).startswith(str(base_attachments)):
+            raise ValueError("Invalid ticket_id: path traversal detected")
+
+        if not attachments_dir.exists():
+            return []
+
+        attachments = []
+        for metadata_file in attachments_dir.glob("*.json"):
+            try:
+                with open(metadata_file) as f:
+                    data = json.load(f)
+                    # Convert ISO datetime strings back to datetime objects
+                    if isinstance(data.get("created_at"), str):
+                        data["created_at"] = datetime.fromisoformat(
+                            data["created_at"].replace("Z", "+00:00")
+                        )
+                    attachment = Attachment(**data)
+                    attachments.append(attachment)
+            except (json.JSONDecodeError, ValueError) as e:
+                # Log error but continue processing other attachments
+                logger.warning(
+                    "Failed to load attachment metadata from %s: %s",
+                    metadata_file,
+                    e,
+                )
+                continue
+
+        # Sort by creation time (newest first)
+        return sorted(
+            attachments,
+            key=lambda a: a.created_at or datetime.min,
+            reverse=True,
+        )
+
+    async def delete_attachment(
+        self,
+        ticket_id: str,
+        attachment_id: str,
+    ) -> bool:
+        """Delete an attachment and its metadata with path traversal protection.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+            attachment_id: Attachment identifier
+
+        Returns:
+        -------
+            True if deleted, False if not found
+
+        """
+        # Resolve base directory
+        attachments_dir = (self.base_path / "attachments" / ticket_id).resolve()
+
+        # Validate attachments directory exists
+        if not attachments_dir.exists():
+            return False
+
+        # Resolve file paths
+        attachment_file = (attachments_dir / attachment_id).resolve()
+        metadata_file = (attachments_dir / f"{attachment_id}.json").resolve()
+
+        # CRITICAL SECURITY CHECK: Ensure paths are within attachments_dir
+        base_resolved = attachments_dir.resolve()
+        if not str(attachment_file).startswith(str(base_resolved)):
+            raise ValueError(
+                "Invalid attachment path: path traversal detected in attachment_id"
+            )
+        if not str(metadata_file).startswith(str(base_resolved)):
+            raise ValueError(
+                "Invalid attachment path: path traversal detected in attachment_id"
+            )
+
+        # Delete files if they exist
+        deleted = False
+        if attachment_file.exists():
+            attachment_file.unlink()
+            deleted = True
+
+        if metadata_file.exists():
+            metadata_file.unlink()
+            deleted = True
+
+        return deleted
+
+    async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
+        """Update an epic (project) in AITrackdown.
+
+        Args:
+        ----
+            epic_id: Epic identifier (filename without .json)
+            updates: Dictionary of fields to update. Supported fields:
+                - title: Epic title
+                - description: Epic description
+                - state: TicketState value
+                - priority: Priority value
+                - tags: List of tags
+                - target_date: Target completion date
+                - metadata: User metadata dictionary
+
+        Returns:
+        -------
+            Updated Epic object or None if epic not found
+
+        Raises:
+        ------
+            ValueError: If epic_id is invalid or epic not found
+
+        Note:
+        ----
+            AITrackdown stores epics as JSON files in {storage_path}/tickets/
+            Updates are applied as partial updates (only specified fields changed)
+
+        """
+        # Validate epic_id
+        if not epic_id:
+            raise ValueError("epic_id is required")
+
+        # Read existing epic
+        existing = await self.read(epic_id)
+        if not existing:
+            logger.warning("Epic %s not found for update", epic_id)
+            return None
+
+        # Ensure it's an epic, not a task
+        if not isinstance(existing, Epic):
+            logger.warning("Ticket %s is not an epic", epic_id)
+            return None
+
+        # Apply updates to the existing epic
+        for key, value in updates.items():
+            if hasattr(existing, key) and value is not None:
+                setattr(existing, key, value)
+
+        # Update timestamp
+        existing.updated_at = datetime.now()
+
+        # Write back to file
+        ai_ticket = self._epic_to_ai_ticket(existing)
+        self._write_ticket_file(epic_id, ai_ticket)
+
+        logger.info("Updated epic %s with fields: %s", epic_id, list(updates.keys()))
+        return existing
+
+    async def list_labels(self, limit: int = 100) -> builtins.list[dict[str, Any]]:
+        """List all tags (labels) used across tickets.
+
+        Args:
+        ----
+            limit: Maximum number of labels to return (default: 100)
+
+        Returns:
+        -------
+            List of label dictionaries sorted by usage count (descending).
+            Each dictionary contains:
+            - id: Tag name (same as name in AITrackdown)
+            - name: Tag name
+            - count: Number of tickets using this tag
+
+        Note:
+        ----
+            AITrackdown uses 'tags' terminology. This method scans
+            all task and epic files to extract unique tags.
+
+        """
+        # Initialize tag counter
+        tag_counts: dict[str, int] = {}
+
+        # Scan all ticket JSON files
+        if self.tickets_dir.exists():
+            for ticket_file in self.tickets_dir.glob("*.json"):
+                try:
+                    with open(ticket_file) as f:
+                        ticket_data = json.load(f)
+                        tags = ticket_data.get("tags", [])
+                        for tag in tags:
+                            tag_counts[tag] = tag_counts.get(tag, 0) + 1
+                except (json.JSONDecodeError, OSError) as e:
+                    logger.warning("Failed to read ticket file %s: %s", ticket_file, e)
+                    continue
+
+        # Sort by usage count (descending)
+        sorted_tags = sorted(tag_counts.items(), key=lambda x: x[1], reverse=True)
+
+        # Return top N tags with standardized format
+        return [
+            {"id": tag, "name": tag, "count": count}
+            for tag, count in sorted_tags[:limit]
+        ]
+
+    async def create_issue_label(
+        self, name: str, color: str | None = None
+    ) -> dict[str, Any]:
+        """Create/register a label (tag) in AITrackdown.
+
+        Args:
+        ----
+            name: Label name (alphanumeric, hyphens, underscores allowed)
+            color: Optional color (not used in file-based storage)
+
+        Returns:
+        -------
+            Label dictionary with:
+            - id: Label name
+            - name: Label name
+            - color: Color value (if provided)
+            - created: True (always, as tags are created on use)
+
+        Raises:
+        ------
+            ValueError: If label name is invalid
+
+        Note:
+        ----
+            AITrackdown creates tags implicitly when used on tickets.
+            This method validates the tag name and returns success.
+            Tags are stored as arrays in ticket JSON files.
+
+        """
+        # Validate tag name
+        if not name:
+            raise ValueError("Label name cannot be empty")
+
+        # Check for valid characters (alphanumeric, hyphens, underscores, spaces)
+        import re
+
+        if not re.match(r"^[a-zA-Z0-9_\- ]+$", name):
+            raise ValueError(
+                "Label name must contain only alphanumeric characters, hyphens, underscores, or spaces"
+            )
+
+        # Return success response
+        logger.info("Label '%s' registered (created implicitly on use)", name)
+        return {
+            "id": name,
+            "name": name,
+            "color": color,
+            "created": True,
+        }
+
+    async def list_project_labels(
+        self, epic_id: str, limit: int = 100
+    ) -> builtins.list[dict[str, Any]]:
+        """List labels (tags) used in a specific epic and its tasks.
+
+        Args:
+        ----
+            epic_id: Epic identifier
+            limit: Maximum number of labels to return (default: 100)
+
+        Returns:
+        -------
+            List of label dictionaries used in the epic, sorted by usage count.
+            Each dictionary contains:
+            - id: Tag name
+            - name: Tag name
+            - count: Number of tickets using this tag within the epic
+
+        Raises:
+        ------
+            ValueError: If epic not found
+
+        Note:
+        ----
+            Scans the epic and all tasks with parent_epic == epic_id.
+
+        """
+        # Validate epic exists
+        epic = await self.get_epic(epic_id)
+        if not epic:
+            raise ValueError(f"Epic {epic_id} not found")
+
+        # Initialize tag counter
+        tag_counts: dict[str, int] = {}
+
+        # Add tags from the epic itself
+        if epic.tags:
+            for tag in epic.tags:
+                tag_counts[tag] = tag_counts.get(tag, 0) + 1
+
+        # Find all tasks with parent_epic == epic_id
+        all_tasks = await self.list_issues_by_epic(epic_id)
+        for task in all_tasks:
+            if task.tags:
+                for tag in task.tags:
+                    tag_counts[tag] = tag_counts.get(tag, 0) + 1
+
+        # Sort by usage count (descending)
+        sorted_tags = sorted(tag_counts.items(), key=lambda x: x[1], reverse=True)
+
+        # Return top N tags
+        return [
+            {"id": tag, "name": tag, "count": count}
+            for tag, count in sorted_tags[:limit]
+        ]
+
+    async def list_cycles(self, limit: int = 50) -> builtins.list[dict[str, Any]]:
+        """List cycles (sprints) - Not supported in file-based AITrackdown.
+
+        Args:
+        ----
+            limit: Maximum number of cycles to return (unused)
+
+        Returns:
+        -------
+            Empty list (cycles not supported)
+
+        Note:
+        ----
+            AITrackdown is a simple file-based system without
+            cycle/sprint management. Returns empty list.
+
+        """
+        logger.info("list_cycles called but cycles not supported in AITrackdown")
+        return []
+
+    async def get_issue_status(self, ticket_id: str) -> dict[str, Any] | None:
+        """Get status details for a ticket.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+
+        Returns:
+        -------
+            Status dictionary with:
+            - id: Ticket ID
+            - state: Current state
+            - priority: Current priority
+            - updated_at: Last update timestamp
+            - created_at: Creation timestamp
+            - title: Ticket title
+            - assignee: Assignee (if Task, None for Epic)
+            Returns None if ticket not found
+
+        Raises:
+        ------
+            ValueError: If ticket_id is invalid
+
+        """
+        if not ticket_id:
+            raise ValueError("ticket_id is required")
+
+        # Read ticket
+        ticket = await self.read(ticket_id)
+        if not ticket:
+            logger.warning("Ticket %s not found", ticket_id)
+            return None
+
+        # Return comprehensive status object
+        status = {
+            "id": ticket.id,
+            "state": ticket.state,
+            "priority": ticket.priority,
+            "updated_at": ticket.updated_at.isoformat() if ticket.updated_at else None,
+            "created_at": ticket.created_at.isoformat() if ticket.created_at else None,
+            "title": ticket.title,
+        }
+
+        # Add assignee only if ticket is a Task (Epic doesn't have assignee)
+        if hasattr(ticket, "assignee"):
+            status["assignee"] = ticket.assignee
+
+        return status
+
+    async def list_issue_statuses(self) -> builtins.list[dict[str, Any]]:
+        """List available ticket statuses.
+
+        Returns:
+        -------
+            List of status dictionaries with:
+            - id: State identifier
+            - name: Human-readable state name
+            - description: State description
+
+        Note:
+        ----
+            AITrackdown uses standard TicketState enum values:
+            open, in_progress, ready, tested, done, closed, waiting, blocked
+
+        """
+        # Return hardcoded list of TicketState values
+        statuses = [
+            {
+                "id": "open",
+                "name": "Open",
+                "description": "Ticket is created and ready to be worked on",
+            },
+            {
+                "id": "in_progress",
+                "name": "In Progress",
+                "description": "Ticket is actively being worked on",
+            },
+            {
+                "id": "ready",
+                "name": "Ready",
+                "description": "Ticket is ready for review or testing",
+            },
+            {
+                "id": "tested",
+                "name": "Tested",
+                "description": "Ticket has been tested and verified",
+            },
+            {
+                "id": "done",
+                "name": "Done",
+                "description": "Ticket work is completed",
+            },
+            {
+                "id": "closed",
+                "name": "Closed",
+                "description": "Ticket is closed and archived",
+            },
+            {
+                "id": "waiting",
+                "name": "Waiting",
+                "description": "Ticket is waiting for external dependency",
+            },
+            {
+                "id": "blocked",
+                "name": "Blocked",
+                "description": "Ticket is blocked by an issue or dependency",
+            },
+        ]
+        return statuses
+
+    # Milestone Methods (Not yet implemented)
+
+    async def milestone_create(
+        self,
+        name: str,
+        target_date: datetime | None = None,
+        labels: list[str] | None = None,
+        description: str = "",
+        project_id: str | None = None,
+    ) -> Any:
+        """Create milestone - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            name: Milestone name
+            target_date: Target completion date
+            labels: Labels that define this milestone
+            description: Milestone description
+            project_id: Associated project ID
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")
+
+    async def milestone_get(self, milestone_id: str) -> Any:
+        """Get milestone - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")
+
+    async def milestone_list(
+        self,
+        project_id: str | None = None,
+        state: str | None = None,
+    ) -> list[Any]:
+        """List milestones - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            project_id: Filter by project
+            state: Filter by state
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")
+
+    async def milestone_update(
+        self,
+        milestone_id: str,
+        name: str | None = None,
+        target_date: datetime | None = None,
+        state: str | None = None,
+        labels: list[str] | None = None,
+        description: str | None = None,
+    ) -> Any:
+        """Update milestone - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            name: New name
+            target_date: New target date
+            state: New state
+            labels: New labels
+            description: New description
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")
+
+    async def milestone_delete(self, milestone_id: str) -> bool:
+        """Delete milestone - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")
+
+    async def milestone_get_issues(
+        self,
+        milestone_id: str,
+        state: str | None = None,
+    ) -> list[Any]:
+        """Get milestone issues - not yet implemented for AITrackdown.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            state: Filter by issue state
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for AITrackdown coming in v2.1.0")


 # Register the adapter
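The metadata handling added to `_task_from_ai_ticket` and `_epic_from_ai_ticket` above merges adapter-generated keys with whatever metadata the ticket file already carries, and the comment in the diff says the user's values take priority. A minimal sketch of that precedence follows; the ticket ID, source, and reviewer values are hypothetical and only the merge expression mirrors the diff.

```python
# Illustration only: hypothetical values, same {**adapter, **user} merge order
# as the new code above, so later (user) keys override adapter defaults.
adapter_metadata = {"ai_ticket_id": "TSK-0001", "source": "aitrackdown"}
user_metadata = {"source": "manual-import", "reviewer": "alice"}

combined_metadata = {**adapter_metadata, **user_metadata}
print(combined_metadata)
# {'ai_ticket_id': 'TSK-0001', 'source': 'manual-import', 'reviewer': 'alice'}
```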
|