mcp-ticketer 0.1.30__py3-none-any.whl → 1.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +796 -46
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1416 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github.py +879 -129
- mcp_ticketer/adapters/hybrid.py +11 -11
- mcp_ticketer/adapters/jira.py +973 -73
- mcp_ticketer/adapters/linear/__init__.py +24 -0
- mcp_ticketer/adapters/linear/adapter.py +2732 -0
- mcp_ticketer/adapters/linear/client.py +344 -0
- mcp_ticketer/adapters/linear/mappers.py +420 -0
- mcp_ticketer/adapters/linear/queries.py +479 -0
- mcp_ticketer/adapters/linear/types.py +360 -0
- mcp_ticketer/adapters/linear.py +10 -2315
- mcp_ticketer/analysis/__init__.py +23 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +421 -0
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +888 -151
- mcp_ticketer/cli/diagnostics.py +400 -157
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +616 -0
- mcp_ticketer/cli/main.py +203 -1165
- mcp_ticketer/cli/mcp_configure.py +474 -90
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +418 -0
- mcp_ticketer/cli/platform_installer.py +513 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +639 -0
- mcp_ticketer/cli/simple_health.py +90 -65
- mcp_ticketer/cli/ticket_commands.py +1013 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +114 -66
- mcp_ticketer/core/__init__.py +24 -1
- mcp_ticketer/core/adapter.py +250 -16
- mcp_ticketer/core/config.py +145 -37
- mcp_ticketer/core/env_discovery.py +101 -22
- mcp_ticketer/core/env_loader.py +349 -0
- mcp_ticketer/core/exceptions.py +160 -0
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/models.py +280 -28
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/project_config.py +183 -49
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +171 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +655 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +56 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +495 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +226 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +273 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1439 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +921 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +300 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +948 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +215 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +170 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1268 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +547 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +95 -25
- mcp_ticketer/queue/queue.py +40 -21
- mcp_ticketer/queue/run_worker.py +6 -1
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +109 -49
- mcp_ticketer-1.2.11.dist-info/METADATA +792 -0
- mcp_ticketer-1.2.11.dist-info/RECORD +110 -0
- mcp_ticketer/mcp/server.py +0 -1895
- mcp_ticketer-0.1.30.dist-info/METADATA +0 -413
- mcp_ticketer-0.1.30.dist-info/RECORD +0 -49
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/top_level.txt +0 -0
mcp_ticketer/adapters/aitrackdown.py

@@ -2,14 +2,25 @@
 
 import builtins
 import json
+import logging
 from datetime import datetime
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 from ..core.adapter import BaseAdapter
-from ..core.models import
+from ..core.models import (
+    Attachment,
+    Comment,
+    Epic,
+    Priority,
+    SearchQuery,
+    Task,
+    TicketState,
+)
 from ..core.registry import AdapterRegistry
 
+logger = logging.getLogger(__name__)
+
 # Import ai-trackdown-pytools when available
 try:
     from ai_trackdown_pytools import AITrackdown
@@ -29,20 +40,24 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         """Initialize AI-Trackdown adapter.
 
         Args:
+        ----
             config: Configuration with 'base_path' for tickets directory
 
         """
         super().__init__(config)
         self.base_path = Path(config.get("base_path", ".aitrackdown"))
         self.tickets_dir = self.base_path / "tickets"
+        self._comment_counter = 0  # Counter for unique comment IDs
 
         # Initialize AI-Trackdown if available
+        # Always create tickets directory (needed for both modes)
+        self.tickets_dir.mkdir(parents=True, exist_ok=True)
+
         if HAS_AITRACKDOWN:
             self.tracker = AITrackdown(str(self.base_path))
         else:
             # Fallback to direct file operations
             self.tracker = None
-            self.tickets_dir.mkdir(parents=True, exist_ok=True)
 
     def validate_credentials(self) -> tuple[bool, str]:
         """Validate that required credentials are present.
@@ -50,6 +65,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         AITrackdown is file-based and doesn't require credentials.
 
         Returns:
+        -------
             (is_valid, error_message) - Always returns (True, "") for AITrackdown
 
         """
@@ -60,10 +76,15 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return True, ""
 
     def _get_state_mapping(self) -> dict[TicketState, str]:
-        """Map universal states to AI-Trackdown states.
+        """Map universal states to AI-Trackdown states.
+
+        Note: We use the exact enum values (snake_case) to match what
+        Pydantic's use_enum_values=True produces. This ensures consistency
+        between what's written to files and what's read back.
+        """
         return {
             TicketState.OPEN: "open",
-            TicketState.IN_PROGRESS: "
+            TicketState.IN_PROGRESS: "in_progress",  # snake_case, not kebab-case
             TicketState.READY: "ready",
             TicketState.TESTED: "tested",
             TicketState.DONE: "done",
@@ -72,7 +93,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             TicketState.CLOSED: "closed",
         }
 
-    def _priority_to_ai(self, priority:
+    def _priority_to_ai(self, priority: Priority | str) -> str:
         """Convert universal priority to AI-Trackdown priority."""
         if isinstance(priority, Priority):
             return priority.value
@@ -87,6 +108,18 @@ class AITrackdownAdapter(BaseAdapter[Task]):
 
     def _task_from_ai_ticket(self, ai_ticket: dict[str, Any]) -> Task:
         """Convert AI-Trackdown ticket to universal Task."""
+        # Get user metadata from ticket file
+        user_metadata = ai_ticket.get("metadata", {})
+
+        # Create adapter metadata
+        adapter_metadata = {
+            "ai_ticket_id": ai_ticket.get("id"),
+            "source": "aitrackdown",
+        }
+
+        # Merge user metadata with adapter metadata (user takes priority)
+        combined_metadata = {**adapter_metadata, **user_metadata}
+
         return Task(
             id=ai_ticket.get("id"),
             title=ai_ticket.get("title", ""),
@@ -107,11 +140,23 @@ class AITrackdownAdapter(BaseAdapter[Task]):
                 if "updated_at" in ai_ticket
                 else None
             ),
-            metadata=
+            metadata=combined_metadata,  # Use merged metadata
         )
 
     def _epic_from_ai_ticket(self, ai_ticket: dict[str, Any]) -> Epic:
         """Convert AI-Trackdown ticket to universal Epic."""
+        # Get user metadata from ticket file
+        user_metadata = ai_ticket.get("metadata", {})
+
+        # Create adapter metadata
+        adapter_metadata = {
+            "ai_ticket_id": ai_ticket.get("id"),
+            "source": "aitrackdown",
+        }
+
+        # Merge user metadata with adapter metadata (user takes priority)
+        combined_metadata = {**adapter_metadata, **user_metadata}
+
         return Epic(
             id=ai_ticket.get("id"),
             title=ai_ticket.get("title", ""),
@@ -130,20 +175,20 @@ class AITrackdownAdapter(BaseAdapter[Task]):
                 if "updated_at" in ai_ticket and ai_ticket["updated_at"]
                 else None
             ),
-            metadata=
+            metadata=combined_metadata,  # Use merged metadata
         )
 
     def _task_to_ai_ticket(self, task: Task) -> dict[str, Any]:
         """Convert universal Task to AI-Trackdown ticket."""
         # Handle enum values that may be stored as strings due to use_enum_values=True
+        # Note: task.state is always a string due to ConfigDict(use_enum_values=True)
         state_value = task.state
         if isinstance(task.state, TicketState):
             state_value = self._get_state_mapping()[task.state]
         elif isinstance(task.state, str):
-            # Already a string
-
-
-            )  # Convert snake_case to kebab-case
+            # Already a string - keep as-is (don't convert to kebab-case)
+            # The state is already in snake_case format from the enum value
+            state_value = task.state
 
         return {
             "id": task.id,
@@ -157,20 +202,21 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             "assignee": task.assignee,
             "created_at": task.created_at.isoformat() if task.created_at else None,
             "updated_at": task.updated_at.isoformat() if task.updated_at else None,
+            "metadata": task.metadata or {},  # Serialize user metadata
             "type": "task",
         }
 
     def _epic_to_ai_ticket(self, epic: Epic) -> dict[str, Any]:
         """Convert universal Epic to AI-Trackdown ticket."""
         # Handle enum values that may be stored as strings due to use_enum_values=True
+        # Note: epic.state is always a string due to ConfigDict(use_enum_values=True)
         state_value = epic.state
         if isinstance(epic.state, TicketState):
             state_value = self._get_state_mapping()[epic.state]
         elif isinstance(epic.state, str):
-            # Already a string
-
-
-            )  # Convert snake_case to kebab-case
+            # Already a string - keep as-is (don't convert to kebab-case)
+            # The state is already in snake_case format from the enum value
+            state_value = epic.state
 
         return {
             "id": epic.id,
@@ -182,10 +228,11 @@ class AITrackdownAdapter(BaseAdapter[Task]):
             "child_issues": epic.child_issues,
             "created_at": epic.created_at.isoformat() if epic.created_at else None,
             "updated_at": epic.updated_at.isoformat() if epic.updated_at else None,
+            "metadata": epic.metadata or {},  # Serialize user metadata
             "type": "epic",
         }
 
-    def _read_ticket_file(self, ticket_id: str) ->
+    def _read_ticket_file(self, ticket_id: str) -> dict[str, Any] | None:
         """Read ticket from file system."""
         ticket_file = self.tickets_dir / f"{ticket_id}.json"
         if ticket_file.exists():
@@ -199,7 +246,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         with open(ticket_file, "w") as f:
             json.dump(data, f, indent=2, default=str)
 
-    async def create(self, ticket:
+    async def create(self, ticket: Task | Epic) -> Task | Epic:
         """Create a new task."""
         # Generate ID if not provided
         if not ticket.id:
@@ -235,65 +282,74 @@ class AITrackdownAdapter(BaseAdapter[Task]):
 
         return ticket
 
-    async def create_epic(
+    async def create_epic(
+        self, title: str, description: str = None, **kwargs: Any
+    ) -> Epic:
         """Create a new epic.
 
         Args:
+        ----
             title: Epic title
             description: Epic description
             **kwargs: Additional epic properties
 
         Returns:
+        -------
             Created Epic instance
+
         """
-        epic = Epic(
-            title=title,
-            description=description,
-            **kwargs
-        )
+        epic = Epic(title=title, description=description, **kwargs)
         return await self.create(epic)
 
-    async def create_issue(
+    async def create_issue(
+        self,
+        title: str,
+        parent_epic: str = None,
+        description: str = None,
+        **kwargs: Any,
+    ) -> Task:
         """Create a new issue.
 
         Args:
+        ----
             title: Issue title
             parent_epic: Parent epic ID
             description: Issue description
             **kwargs: Additional issue properties
 
         Returns:
+        -------
             Created Task instance (representing an issue)
+
         """
         task = Task(
-            title=title,
-            description=description,
-            parent_epic=parent_epic,
-            **kwargs
+            title=title, description=description, parent_epic=parent_epic, **kwargs
         )
         return await self.create(task)
 
-    async def create_task(
+    async def create_task(
+        self, title: str, parent_id: str, description: str = None, **kwargs: Any
+    ) -> Task:
         """Create a new task under an issue.
 
         Args:
+        ----
             title: Task title
             parent_id: Parent issue ID
             description: Task description
             **kwargs: Additional task properties
 
         Returns:
+        -------
             Created Task instance
+
         """
         task = Task(
-            title=title,
-            description=description,
-            parent_issue=parent_id,
-            **kwargs
+            title=title, description=description, parent_issue=parent_id, **kwargs
        )
         return await self.create(task)
 
-    async def read(self, ticket_id: str) ->
+    async def read(self, ticket_id: str) -> Task | Epic | None:
         """Read a task by ID."""
         if self.tracker:
             ai_ticket = self.tracker.get_ticket(ticket_id)
@@ -309,9 +365,24 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return None
 
     async def update(
-        self, ticket_id: str, updates:
-    ) ->
-        """Update a task.
+        self, ticket_id: str, updates: dict[str, Any] | Task
+    ) -> Task | Epic | None:
+        """Update a task or epic.
+
+        Args:
+        ----
+            ticket_id: ID of ticket to update
+            updates: Dictionary of updates or Task object with new values
+
+        Returns:
+        -------
+            Updated Task or Epic, or None if ticket not found
+
+        Raises:
+        ------
+            AttributeError: If update fails due to invalid fields
+
+        """
         # Read existing ticket
         existing = await self.read(ticket_id)
         if not existing:
@@ -335,8 +406,12 @@ class AITrackdownAdapter(BaseAdapter[Task]):
 
         existing.updated_at = datetime.now()
 
-        # Write back
-
+        # Write back - use appropriate converter based on ticket type
+        if isinstance(existing, Epic):
+            ai_ticket = self._epic_to_ai_ticket(existing)
+        else:
+            ai_ticket = self._task_to_ai_ticket(existing)
+
         if self.tracker:
             self.tracker.update_ticket(ticket_id, **updates)
         else:
@@ -356,7 +431,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         return False
 
     async def list(
-        self, limit: int = 10, offset: int = 0, filters:
+        self, limit: int = 10, offset: int = 0, filters: dict[str, Any] | None = None
     ) -> list[Task]:
         """List tasks with pagination."""
         tasks = []
@@ -440,7 +515,7 @@ class AITrackdownAdapter(BaseAdapter[Task]):
 
     async def transition_state(
         self, ticket_id: str, target_state: TicketState
-    ) ->
+    ) -> Task | None:
         """Transition task to new state."""
         # Validate transition
         if not await self.validate_transition(ticket_id, target_state):
@@ -451,10 +526,11 @@ class AITrackdownAdapter(BaseAdapter[Task]):
 
     async def add_comment(self, comment: Comment) -> Comment:
         """Add comment to a task."""
-        # Generate ID
+        # Generate ID with counter to ensure uniqueness
         if not comment.id:
             timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
-
+            self._comment_counter += 1
+            comment.id = f"comment-{timestamp}-{self._comment_counter:04d}"
 
         comment.created_at = datetime.now()
 
@@ -475,14 +551,688 @@ class AITrackdownAdapter(BaseAdapter[Task]):
         comments_dir = self.base_path / "comments"
 
         if comments_dir.exists():
+            # Get all comment files and filter by ticket_id first
             comment_files = sorted(comments_dir.glob("*.json"))
-            for comment_file in comment_files
+            for comment_file in comment_files:
                 with open(comment_file) as f:
                     data = json.load(f)
                     if data.get("ticket_id") == ticket_id:
                         comments.append(Comment(**data))
 
-
+        # Apply limit and offset AFTER filtering
+        return comments[offset : offset + limit]
+
+    async def get_epic(self, epic_id: str) -> Epic | None:
+        """Get epic by ID.
+
+        Args:
+        ----
+            epic_id: Epic ID to retrieve
+
+        Returns:
+        -------
+            Epic if found, None otherwise
+
+        """
+        ticket = await self.read(epic_id)
+        if ticket:
+            # Check if it's an Epic (can be Epic instance or have epic ticket_type)
+            if isinstance(ticket, Epic):
+                return ticket
+            # Check ticket_type (may be string or enum)
+            ticket_type_str = (
+                str(ticket.ticket_type).lower()
+                if hasattr(ticket, "ticket_type")
+                else None
+            )
+            if ticket_type_str and "epic" in ticket_type_str:
+                return Epic(**ticket.model_dump())
+        return None
+
+    async def list_epics(self, limit: int = 10, offset: int = 0) -> builtins.list[Epic]:
+        """List all epics.
+
+        Args:
+        ----
+            limit: Maximum number of epics to return
+            offset: Number of epics to skip
+
+        Returns:
+        -------
+            List of epics
+
+        """
+        all_tickets = await self.list(limit=100, offset=0, filters={"type": "epic"})
+        epics = []
+        for ticket in all_tickets:
+            if ticket.ticket_type == "epic":
+                epics.append(Epic(**ticket.model_dump()))
+        return epics[offset : offset + limit]
+
+    async def list_issues_by_epic(self, epic_id: str) -> builtins.list[Task]:
+        """List all issues belonging to an epic.
+
+        Args:
+        ----
+            epic_id: Epic ID to get issues for
+
+        Returns:
+        -------
+            List of issues (tasks with parent_epic set)
+
+        """
+        all_tickets = await self.list(limit=1000, offset=0, filters={})
+        issues = []
+        for ticket in all_tickets:
+            if hasattr(ticket, "parent_epic") and ticket.parent_epic == epic_id:
+                issues.append(ticket)
+        return issues
+
+    async def list_tasks_by_issue(self, issue_id: str) -> builtins.list[Task]:
+        """List all tasks belonging to an issue.
+
+        Args:
+        ----
+            issue_id: Issue ID (parent task) to get child tasks for
+
+        Returns:
+        -------
+            List of tasks
+
+        """
+        all_tickets = await self.list(limit=1000, offset=0, filters={})
+        tasks = []
+        for ticket in all_tickets:
+            # Check if this ticket has parent_issue matching the issue
+            if hasattr(ticket, "parent_issue") and ticket.parent_issue == issue_id:
+                tasks.append(ticket)
+        return tasks
+
+    def _sanitize_filename(self, filename: str) -> str:
+        """Sanitize filename to prevent security issues.
+
+        Args:
+        ----
+            filename: Original filename
+
+        Returns:
+        -------
+            Sanitized filename safe for filesystem
+
+        """
+        # Remove path separators and other dangerous characters
+        safe_chars = set(
+            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._- "
+        )
+        sanitized = "".join(c if c in safe_chars else "_" for c in filename)
+
+        # Ensure filename is not empty
+        if not sanitized.strip():
+            return "unnamed_file"
+
+        return sanitized.strip()
+
+    def _guess_content_type(self, file_path: Path) -> str:
+        """Guess MIME type from file extension.
+
+        Args:
+        ----
+            file_path: Path to file
+
+        Returns:
+        -------
+            MIME type string
+
+        """
+        import mimetypes
+
+        content_type, _ = mimetypes.guess_type(str(file_path))
+        return content_type or "application/octet-stream"
+
+    def _calculate_checksum(self, file_path: Path) -> str:
+        """Calculate SHA256 checksum of file.
+
+        Args:
+        ----
+            file_path: Path to file
+
+        Returns:
+        -------
+            Hexadecimal checksum string
+
+        """
+        import hashlib
+
+        sha256 = hashlib.sha256()
+        with open(file_path, "rb") as f:
+            # Read in chunks to handle large files
+            for chunk in iter(lambda: f.read(4096), b""):
+                sha256.update(chunk)
+
+        return sha256.hexdigest()
+
+    async def add_attachment(
+        self,
+        ticket_id: str,
+        file_path: str,
+        description: str | None = None,
+    ) -> Attachment:
+        """Attach a file to a ticket (local filesystem storage).
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+            file_path: Local file path to attach
+            description: Optional attachment description
+
+        Returns:
+        -------
+            Attachment metadata
+
+        Raises:
+        ------
+            ValueError: If ticket doesn't exist
+            FileNotFoundError: If file doesn't exist
+
+        """
+        import shutil
+
+        # Validate ticket exists
+        ticket = await self.read(ticket_id)
+        if not ticket:
+            raise ValueError(f"Ticket {ticket_id} not found")
+
+        # Validate file exists
+        source_path = Path(file_path).resolve()
+        if not source_path.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+
+        # Check file size (max 100MB for local storage)
+        size_mb = source_path.stat().st_size / (1024 * 1024)
+        if size_mb > 100:
+            raise ValueError(f"File too large: {size_mb:.2f}MB (max: 100MB)")
+
+        # Create attachments directory for this ticket
+        attachments_dir = self.base_path / "attachments" / ticket_id
+        attachments_dir.mkdir(parents=True, exist_ok=True)
+
+        # Generate unique filename with timestamp
+        timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
+        safe_filename = self._sanitize_filename(source_path.name)
+        attachment_id = f"{timestamp}-{safe_filename}"
+        dest_path = attachments_dir / attachment_id
+
+        # Copy file to attachments directory
+        shutil.copy2(source_path, dest_path)
+
+        # Create attachment metadata
+        attachment = Attachment(
+            id=attachment_id,
+            ticket_id=ticket_id,
+            filename=source_path.name,
+            url=f"file://{dest_path.absolute()}",
+            content_type=self._guess_content_type(source_path),
+            size_bytes=source_path.stat().st_size,
+            created_at=datetime.now(),
+            description=description,
+            metadata={
+                "original_path": str(source_path),
+                "storage_path": str(dest_path),
+                "checksum": self._calculate_checksum(dest_path),
+            },
+        )
+
+        # Save metadata to JSON file
+        metadata_file = attachments_dir / f"{attachment_id}.json"
+        with open(metadata_file, "w") as f:
+            # Convert to dict and handle datetime serialization
+            data = attachment.model_dump()
+            json.dump(data, f, indent=2, default=str)
+
+        return attachment
+
+    async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
+        """Get all attachments for a ticket with path traversal protection.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+
+        Returns:
+        -------
+            List of attachments (empty if none)
+
+        """
+        # Resolve and validate attachments directory
+        attachments_dir = (self.base_path / "attachments" / ticket_id).resolve()
+
+        # CRITICAL SECURITY CHECK: Ensure ticket directory is within base attachments
+        base_attachments = (self.base_path / "attachments").resolve()
+        if not str(attachments_dir).startswith(str(base_attachments)):
+            raise ValueError("Invalid ticket_id: path traversal detected")
+
+        if not attachments_dir.exists():
+            return []
+
+        attachments = []
+        for metadata_file in attachments_dir.glob("*.json"):
+            try:
+                with open(metadata_file) as f:
+                    data = json.load(f)
+                # Convert ISO datetime strings back to datetime objects
+                if isinstance(data.get("created_at"), str):
+                    data["created_at"] = datetime.fromisoformat(
+                        data["created_at"].replace("Z", "+00:00")
+                    )
+                attachment = Attachment(**data)
+                attachments.append(attachment)
+            except (json.JSONDecodeError, ValueError) as e:
+                # Log error but continue processing other attachments
+                logger.warning(
+                    "Failed to load attachment metadata from %s: %s",
+                    metadata_file,
+                    e,
+                )
+                continue
+
+        # Sort by creation time (newest first)
+        return sorted(
+            attachments,
+            key=lambda a: a.created_at or datetime.min,
+            reverse=True,
+        )
+
+    async def delete_attachment(
+        self,
+        ticket_id: str,
+        attachment_id: str,
+    ) -> bool:
+        """Delete an attachment and its metadata with path traversal protection.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+            attachment_id: Attachment identifier
+
+        Returns:
+        -------
+            True if deleted, False if not found
+
+        """
+        # Resolve base directory
+        attachments_dir = (self.base_path / "attachments" / ticket_id).resolve()
+
+        # Validate attachments directory exists
+        if not attachments_dir.exists():
+            return False
+
+        # Resolve file paths
+        attachment_file = (attachments_dir / attachment_id).resolve()
+        metadata_file = (attachments_dir / f"{attachment_id}.json").resolve()
+
+        # CRITICAL SECURITY CHECK: Ensure paths are within attachments_dir
+        base_resolved = attachments_dir.resolve()
+        if not str(attachment_file).startswith(str(base_resolved)):
+            raise ValueError(
+                "Invalid attachment path: path traversal detected in attachment_id"
+            )
+        if not str(metadata_file).startswith(str(base_resolved)):
+            raise ValueError(
+                "Invalid attachment path: path traversal detected in attachment_id"
+            )
+
+        # Delete files if they exist
+        deleted = False
+        if attachment_file.exists():
+            attachment_file.unlink()
+            deleted = True
+
+        if metadata_file.exists():
+            metadata_file.unlink()
+            deleted = True
+
+        return deleted
+
+    async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
+        """Update an epic (project) in AITrackdown.
+
+        Args:
+        ----
+            epic_id: Epic identifier (filename without .json)
+            updates: Dictionary of fields to update. Supported fields:
+                - title: Epic title
+                - description: Epic description
+                - state: TicketState value
+                - priority: Priority value
+                - tags: List of tags
+                - target_date: Target completion date
+                - metadata: User metadata dictionary
+
+        Returns:
+        -------
+            Updated Epic object or None if epic not found
+
+        Raises:
+        ------
+            ValueError: If epic_id is invalid or epic not found
+
+        Note:
+        ----
+            AITrackdown stores epics as JSON files in {storage_path}/tickets/
+            Updates are applied as partial updates (only specified fields changed)
+
+        """
+        # Validate epic_id
+        if not epic_id:
+            raise ValueError("epic_id is required")
+
+        # Read existing epic
+        existing = await self.read(epic_id)
+        if not existing:
+            logger.warning("Epic %s not found for update", epic_id)
+            return None
+
+        # Ensure it's an epic, not a task
+        if not isinstance(existing, Epic):
+            logger.warning("Ticket %s is not an epic", epic_id)
+            return None
+
+        # Apply updates to the existing epic
+        for key, value in updates.items():
+            if hasattr(existing, key) and value is not None:
+                setattr(existing, key, value)
+
+        # Update timestamp
+        existing.updated_at = datetime.now()
+
+        # Write back to file
+        ai_ticket = self._epic_to_ai_ticket(existing)
+        self._write_ticket_file(epic_id, ai_ticket)
+
+        logger.info("Updated epic %s with fields: %s", epic_id, list(updates.keys()))
+        return existing
+
+    async def list_labels(self, limit: int = 100) -> builtins.list[dict[str, Any]]:
+        """List all tags (labels) used across tickets.
+
+        Args:
+        ----
+            limit: Maximum number of labels to return (default: 100)
+
+        Returns:
+        -------
+            List of label dictionaries sorted by usage count (descending).
+            Each dictionary contains:
+            - id: Tag name (same as name in AITrackdown)
+            - name: Tag name
+            - count: Number of tickets using this tag
+
+        Note:
+        ----
+            AITrackdown uses 'tags' terminology. This method scans
+            all task and epic files to extract unique tags.
+
+        """
+        # Initialize tag counter
+        tag_counts: dict[str, int] = {}
+
+        # Scan all ticket JSON files
+        if self.tickets_dir.exists():
+            for ticket_file in self.tickets_dir.glob("*.json"):
+                try:
+                    with open(ticket_file) as f:
+                        ticket_data = json.load(f)
+                    tags = ticket_data.get("tags", [])
+                    for tag in tags:
+                        tag_counts[tag] = tag_counts.get(tag, 0) + 1
+                except (json.JSONDecodeError, OSError) as e:
+                    logger.warning("Failed to read ticket file %s: %s", ticket_file, e)
+                    continue
+
+        # Sort by usage count (descending)
+        sorted_tags = sorted(tag_counts.items(), key=lambda x: x[1], reverse=True)
+
+        # Return top N tags with standardized format
+        return [
+            {"id": tag, "name": tag, "count": count}
+            for tag, count in sorted_tags[:limit]
+        ]
+
+    async def create_issue_label(
+        self, name: str, color: str | None = None
+    ) -> dict[str, Any]:
+        """Create/register a label (tag) in AITrackdown.
+
+        Args:
+        ----
+            name: Label name (alphanumeric, hyphens, underscores allowed)
+            color: Optional color (not used in file-based storage)
+
+        Returns:
+        -------
+            Label dictionary with:
+            - id: Label name
+            - name: Label name
+            - color: Color value (if provided)
+            - created: True (always, as tags are created on use)
+
+        Raises:
+        ------
+            ValueError: If label name is invalid
+
+        Note:
+        ----
+            AITrackdown creates tags implicitly when used on tickets.
+            This method validates the tag name and returns success.
+            Tags are stored as arrays in ticket JSON files.
+
+        """
+        # Validate tag name
+        if not name:
+            raise ValueError("Label name cannot be empty")
+
+        # Check for valid characters (alphanumeric, hyphens, underscores, spaces)
+        import re
+
+        if not re.match(r"^[a-zA-Z0-9_\- ]+$", name):
+            raise ValueError(
+                "Label name must contain only alphanumeric characters, hyphens, underscores, or spaces"
+            )
+
+        # Return success response
+        logger.info("Label '%s' registered (created implicitly on use)", name)
+        return {
+            "id": name,
+            "name": name,
+            "color": color,
+            "created": True,
+        }
+
+    async def list_project_labels(
+        self, epic_id: str, limit: int = 100
+    ) -> builtins.list[dict[str, Any]]:
+        """List labels (tags) used in a specific epic and its tasks.
+
+        Args:
+        ----
+            epic_id: Epic identifier
+            limit: Maximum number of labels to return (default: 100)
+
+        Returns:
+        -------
+            List of label dictionaries used in the epic, sorted by usage count.
+            Each dictionary contains:
+            - id: Tag name
+            - name: Tag name
+            - count: Number of tickets using this tag within the epic
+
+        Raises:
+        ------
+            ValueError: If epic not found
+
+        Note:
+        ----
+            Scans the epic and all tasks with parent_epic == epic_id.
+
+        """
+        # Validate epic exists
+        epic = await self.get_epic(epic_id)
+        if not epic:
+            raise ValueError(f"Epic {epic_id} not found")
+
+        # Initialize tag counter
+        tag_counts: dict[str, int] = {}
+
+        # Add tags from the epic itself
+        if epic.tags:
+            for tag in epic.tags:
+                tag_counts[tag] = tag_counts.get(tag, 0) + 1
+
+        # Find all tasks with parent_epic == epic_id
+        all_tasks = await self.list_issues_by_epic(epic_id)
+        for task in all_tasks:
+            if task.tags:
+                for tag in task.tags:
+                    tag_counts[tag] = tag_counts.get(tag, 0) + 1
+
+        # Sort by usage count (descending)
+        sorted_tags = sorted(tag_counts.items(), key=lambda x: x[1], reverse=True)
+
+        # Return top N tags
+        return [
+            {"id": tag, "name": tag, "count": count}
+            for tag, count in sorted_tags[:limit]
+        ]
+
+    async def list_cycles(self, limit: int = 50) -> builtins.list[dict[str, Any]]:
+        """List cycles (sprints) - Not supported in file-based AITrackdown.
+
+        Args:
+        ----
+            limit: Maximum number of cycles to return (unused)
+
+        Returns:
+        -------
+            Empty list (cycles not supported)
+
+        Note:
+        ----
+            AITrackdown is a simple file-based system without
+            cycle/sprint management. Returns empty list.
+
+        """
+        logger.info("list_cycles called but cycles not supported in AITrackdown")
+        return []
+
+    async def get_issue_status(self, ticket_id: str) -> dict[str, Any] | None:
+        """Get status details for a ticket.
+
+        Args:
+        ----
+            ticket_id: Ticket identifier
+
+        Returns:
+        -------
+            Status dictionary with:
+            - id: Ticket ID
+            - state: Current state
+            - priority: Current priority
+            - updated_at: Last update timestamp
+            - created_at: Creation timestamp
+            - title: Ticket title
+            - assignee: Assignee (if Task, None for Epic)
+            Returns None if ticket not found
+
+        Raises:
+        ------
+            ValueError: If ticket_id is invalid
+
+        """
+        if not ticket_id:
+            raise ValueError("ticket_id is required")
+
+        # Read ticket
+        ticket = await self.read(ticket_id)
+        if not ticket:
+            logger.warning("Ticket %s not found", ticket_id)
+            return None
+
+        # Return comprehensive status object
+        status = {
+            "id": ticket.id,
+            "state": ticket.state,
+            "priority": ticket.priority,
+            "updated_at": ticket.updated_at.isoformat() if ticket.updated_at else None,
+            "created_at": ticket.created_at.isoformat() if ticket.created_at else None,
+            "title": ticket.title,
+        }
+
+        # Add assignee only if ticket is a Task (Epic doesn't have assignee)
+        if hasattr(ticket, "assignee"):
+            status["assignee"] = ticket.assignee
+
+        return status
+
+    async def list_issue_statuses(self) -> builtins.list[dict[str, Any]]:
+        """List available ticket statuses.
+
+        Returns:
+        -------
+            List of status dictionaries with:
+            - id: State identifier
+            - name: Human-readable state name
+            - description: State description
+
+        Note:
+        ----
+            AITrackdown uses standard TicketState enum values:
+            open, in_progress, ready, tested, done, closed, waiting, blocked
+
+        """
+        # Return hardcoded list of TicketState values
+        statuses = [
+            {
+                "id": "open",
+                "name": "Open",
+                "description": "Ticket is created and ready to be worked on",
+            },
+            {
+                "id": "in_progress",
+                "name": "In Progress",
+                "description": "Ticket is actively being worked on",
+            },
+            {
+                "id": "ready",
+                "name": "Ready",
+                "description": "Ticket is ready for review or testing",
+            },
+            {
+                "id": "tested",
+                "name": "Tested",
+                "description": "Ticket has been tested and verified",
+            },
+            {
+                "id": "done",
+                "name": "Done",
+                "description": "Ticket work is completed",
+            },
+            {
+                "id": "closed",
+                "name": "Closed",
+                "description": "Ticket is closed and archived",
+            },
+            {
+                "id": "waiting",
+                "name": "Waiting",
+                "description": "Ticket is waiting for external dependency",
+            },
+            {
+                "id": "blocked",
+                "name": "Blocked",
+                "description": "Ticket is blocked by an issue or dependency",
+            },
+        ]
+        return statuses
 
 
 # Register the adapter