mcp-ticketer 0.1.26__py3-none-any.whl → 0.1.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic. Click here for more details.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/adapters/aitrackdown.py +58 -0
- mcp_ticketer/adapters/hybrid.py +8 -8
- mcp_ticketer/adapters/linear.py +60 -0
- mcp_ticketer/cli/main.py +146 -4
- mcp_ticketer/core/config.py +28 -21
- mcp_ticketer/mcp/server.py +707 -3
- mcp_ticketer/queue/health_monitor.py +322 -0
- mcp_ticketer/queue/queue.py +147 -66
- mcp_ticketer/queue/ticket_registry.py +416 -0
- mcp_ticketer/queue/worker.py +102 -8
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/METADATA +1 -1
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/RECORD +17 -15
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.26.dist-info → mcp_ticketer-0.1.28.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,416 @@
|
|
|
1
|
+
"""Ticket ID persistence and recovery system."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import sqlite3
|
|
5
|
+
import threading
|
|
6
|
+
from datetime import datetime, timedelta
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Dict, List, Optional
|
|
9
|
+
|
|
10
|
+
from .queue import QueueItem, QueueStatus
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class TicketRegistry:
    """Persistent registry for tracking ticket IDs and their lifecycle.

    Every queued ticket operation is recorded in a local SQLite database so
    that created ticket IDs survive process restarts and failed operations
    can be inspected and recovered later. Writes are serialized through a
    process-local lock; each call opens its own short-lived connection.

    NOTE(review): timestamps use naive local time (``datetime.now()``).
    Stored values are only compared against each other, so this is internally
    consistent, but sharing a database across timezones would skew
    ``cleanup_old_entries()`` — confirm before doing so.
    """

    # Columns stored as JSON-encoded text in ticket_registry rows;
    # decoded back to Python objects by _parse_row().
    _JSON_FIELDS = ("ticket_data", "result_data")

    def __init__(self, db_path: Optional[Path] = None):
        """Initialize ticket registry.

        Args:
            db_path: Path to SQLite database. Defaults to
                ~/.mcp-ticketer/tickets.db; the directory is created if needed.
        """
        if db_path is None:
            db_dir = Path.home() / ".mcp-ticketer"
            db_dir.mkdir(parents=True, exist_ok=True)
            db_path = db_dir / "tickets.db"

        self.db_path = str(db_path)
        self._lock = threading.Lock()
        self._init_database()

    def _init_database(self) -> None:
        """Create tables and indices if they do not already exist."""
        with sqlite3.connect(self.db_path) as conn:
            # One row per queued operation; queue_id is the queue item's ID.
            conn.execute("""
                CREATE TABLE IF NOT EXISTS ticket_registry (
                    queue_id TEXT PRIMARY KEY,
                    ticket_id TEXT,
                    adapter TEXT NOT NULL,
                    operation TEXT NOT NULL,
                    title TEXT,
                    status TEXT NOT NULL,
                    created_at TEXT NOT NULL,
                    updated_at TEXT NOT NULL,
                    ticket_data TEXT,
                    result_data TEXT,
                    error_message TEXT,
                    retry_count INTEGER DEFAULT 0,
                    CHECK (status IN ('queued', 'processing', 'completed', 'failed', 'recovered'))
                )
            """)

            # Indices for the lookup patterns used below.
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_ticket_registry_ticket_id
                ON ticket_registry(ticket_id)
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_ticket_registry_status
                ON ticket_registry(status)
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_ticket_registry_adapter
                ON ticket_registry(adapter)
            """)

            # Append-only audit trail of recovery attempts.
            conn.execute("""
                CREATE TABLE IF NOT EXISTS recovery_log (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    queue_id TEXT NOT NULL,
                    recovery_type TEXT NOT NULL,
                    recovery_data TEXT,
                    timestamp TEXT NOT NULL,
                    success BOOLEAN NOT NULL
                )
            """)

    @staticmethod
    def _parse_row(
        columns: List[str],
        row: tuple,
        json_fields: tuple = _JSON_FIELDS,
    ) -> Dict[str, Any]:
        """Convert one fetched row into a dict, decoding JSON-encoded fields."""
        info = dict(zip(columns, row))
        for field in json_fields:
            if info.get(field):
                info[field] = json.loads(info[field])
        return info

    def _fetch_all(
        self,
        sql: str,
        params: tuple = (),
        json_fields: tuple = _JSON_FIELDS,
    ) -> List[Dict[str, Any]]:
        """Run a SELECT and return every row as a decoded dict."""
        with sqlite3.connect(self.db_path) as conn:
            cursor = conn.execute(sql, params)
            columns = [desc[0] for desc in cursor.description]
            return [self._parse_row(columns, row, json_fields) for row in cursor.fetchall()]

    def register_ticket_operation(
        self,
        queue_id: str,
        adapter: str,
        operation: str,
        title: str,
        ticket_data: Dict[str, Any]
    ) -> None:
        """Register a new ticket operation in status 'queued'.

        Re-registering an existing queue_id replaces the previous row
        (INSERT OR REPLACE), resetting status and retry count.

        Args:
            queue_id: Queue operation ID
            adapter: Adapter name
            operation: Operation type (create, update, etc.)
            title: Ticket title
            ticket_data: Original ticket data (stored as JSON)
        """
        # Single timestamp so created_at and updated_at match on insert.
        now = datetime.now().isoformat()
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute("""
                    INSERT OR REPLACE INTO ticket_registry (
                        queue_id, adapter, operation, title, status,
                        created_at, updated_at, ticket_data, retry_count
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    queue_id,
                    adapter,
                    operation,
                    title,
                    "queued",
                    now,
                    now,
                    json.dumps(ticket_data),
                    0
                ))
                conn.commit()

    def update_ticket_status(
        self,
        queue_id: str,
        status: str,
        ticket_id: Optional[str] = None,
        result_data: Optional[Dict[str, Any]] = None,
        error_message: Optional[str] = None,
        retry_count: Optional[int] = None
    ) -> None:
        """Update ticket operation status.

        Optional arguments left as None are NOT written — the stored value
        is left unchanged (passing error_message=None does not clear it).

        Args:
            queue_id: Queue operation ID
            status: New status (must satisfy the table's CHECK constraint)
            ticket_id: Created ticket ID (if available)
            result_data: Operation result data (stored as JSON)
            error_message: Error message if failed
            retry_count: Current retry count
        """
        # Build the SET clause from hard-coded column names only; user data
        # flows exclusively through bound parameters.
        update_fields = ["status = ?", "updated_at = ?"]
        values: List[Any] = [status, datetime.now().isoformat()]

        if ticket_id is not None:
            update_fields.append("ticket_id = ?")
            values.append(ticket_id)
        if result_data is not None:
            update_fields.append("result_data = ?")
            values.append(json.dumps(result_data))
        if error_message is not None:
            update_fields.append("error_message = ?")
            values.append(error_message)
        if retry_count is not None:
            update_fields.append("retry_count = ?")
            values.append(retry_count)

        values.append(queue_id)

        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute(f"""
                    UPDATE ticket_registry
                    SET {', '.join(update_fields)}
                    WHERE queue_id = ?
                """, values)
                conn.commit()

    def get_ticket_info(self, queue_id: str) -> Optional[Dict[str, Any]]:
        """Get ticket information by queue ID.

        Args:
            queue_id: Queue operation ID

        Returns:
            Ticket information dict (JSON fields decoded) or None if not found
        """
        rows = self._fetch_all(
            "SELECT * FROM ticket_registry WHERE queue_id = ?",
            (queue_id,),
        )
        return rows[0] if rows else None

    def find_tickets_by_id(self, ticket_id: str) -> List[Dict[str, Any]]:
        """Find all operations for a specific ticket ID.

        Args:
            ticket_id: Ticket ID to search for

        Returns:
            List of ticket operations, newest first
        """
        return self._fetch_all("""
            SELECT * FROM ticket_registry
            WHERE ticket_id = ?
            ORDER BY created_at DESC
        """, (ticket_id,))

    def get_failed_operations(self, limit: int = 50) -> List[Dict[str, Any]]:
        """Get failed operations that might need recovery.

        Args:
            limit: Maximum number of operations to return

        Returns:
            List of failed operations, most recently updated first
        """
        return self._fetch_all("""
            SELECT * FROM ticket_registry
            WHERE status = 'failed'
            ORDER BY updated_at DESC
            LIMIT ?
        """, (limit,))

    def get_orphaned_tickets(self) -> List[Dict[str, Any]]:
        """Get tickets that were created but whose queue operation failed.

        A row is "orphaned" when a ticket ID was recorded but the operation
        never reached a terminal success state.

        Returns:
            List of potentially orphaned tickets, most recently updated first
        """
        return self._fetch_all("""
            SELECT * FROM ticket_registry
            WHERE ticket_id IS NOT NULL
            AND status IN ('processing', 'failed')
            ORDER BY updated_at DESC
        """)

    def attempt_recovery(self, queue_id: str, recovery_type: str) -> Dict[str, Any]:
        """Attempt to recover a failed operation.

        Supported recovery types:
            - "mark_completed": mark the row 'recovered' if a ticket ID exists.
            - "retry_operation": reset the row to 'queued' and clear the error.

        Args:
            queue_id: Queue operation ID to recover
            recovery_type: Type of recovery to attempt

        Returns:
            Recovery result dict with at least "success" and, on failure,
            an "error" message. Every attempt is appended to recovery_log.
        """
        ticket_info = self.get_ticket_info(queue_id)
        if not ticket_info:
            return {"success": False, "error": "Ticket operation not found"}

        recovery_data: Dict[str, Any] = {
            "original_status": ticket_info["status"],
            "recovery_type": recovery_type,
            "timestamp": datetime.now().isoformat()
        }

        try:
            if recovery_type == "mark_completed":
                # Only sensible when the external ticket actually exists.
                if ticket_info.get("ticket_id"):
                    self.update_ticket_status(queue_id, "recovered",
                                              result_data={"recovery": "marked_completed"})
                    recovery_data["success"] = True
                    recovery_data["action"] = "Marked as completed based on existing ticket ID"
                else:
                    recovery_data["success"] = False
                    recovery_data["error"] = "No ticket ID available to mark as completed"

            elif recovery_type == "retry_operation":
                # Reset to queued status for retry. update_ticket_status()
                # skips None arguments, so the stale error_message must be
                # cleared with an explicit UPDATE (previously it lingered).
                self.update_ticket_status(
                    queue_id, "queued",
                    retry_count=ticket_info.get("retry_count", 0),
                )
                with self._lock:
                    with sqlite3.connect(self.db_path) as conn:
                        conn.execute(
                            "UPDATE ticket_registry SET error_message = NULL WHERE queue_id = ?",
                            (queue_id,),
                        )
                        conn.commit()
                recovery_data["success"] = True
                recovery_data["action"] = "Reset to queued for retry"

            else:
                recovery_data["success"] = False
                recovery_data["error"] = f"Unknown recovery type: {recovery_type}"

            # Log recovery attempt
            self._log_recovery(queue_id, recovery_type, recovery_data, recovery_data["success"])

            return recovery_data

        except Exception as e:
            recovery_data["success"] = False
            recovery_data["error"] = str(e)
            self._log_recovery(queue_id, recovery_type, recovery_data, False)
            return recovery_data

    def _log_recovery(
        self,
        queue_id: str,
        recovery_type: str,
        recovery_data: Dict[str, Any],
        success: bool
    ) -> None:
        """Append one recovery attempt to the recovery_log table."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute("""
                    INSERT INTO recovery_log (
                        queue_id, recovery_type, recovery_data, timestamp, success
                    ) VALUES (?, ?, ?, ?, ?)
                """, (
                    queue_id,
                    recovery_type,
                    json.dumps(recovery_data),
                    datetime.now().isoformat(),
                    success
                ))
                conn.commit()

    def get_recovery_history(self, queue_id: str) -> List[Dict[str, Any]]:
        """Get recovery history for a queue operation.

        Args:
            queue_id: Queue operation ID

        Returns:
            List of recovery attempts, newest first ("recovery_data" decoded)
        """
        return self._fetch_all("""
            SELECT * FROM recovery_log
            WHERE queue_id = ?
            ORDER BY timestamp DESC
        """, (queue_id,), json_fields=("recovery_data",))

    def cleanup_old_entries(self, days: int = 30) -> int:
        """Clean up old completed entries.

        Only terminal-success rows ('completed'/'recovered') are removed;
        failed rows are kept for inspection and recovery.

        Args:
            days: Remove entries older than this many days

        Returns:
            Number of entries removed
        """
        cutoff_date = (datetime.now() - timedelta(days=days)).isoformat()

        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute("""
                    DELETE FROM ticket_registry
                    WHERE status IN ('completed', 'recovered')
                    AND updated_at < ?
                """, (cutoff_date,))

                deleted_count = cursor.rowcount
                conn.commit()

        return deleted_count
|
mcp_ticketer/queue/worker.py
CHANGED
|
@@ -13,6 +13,10 @@ from dotenv import load_dotenv
|
|
|
13
13
|
|
|
14
14
|
from ..core import AdapterRegistry, Task
|
|
15
15
|
from .queue import Queue, QueueItem, QueueStatus
|
|
16
|
+
from .ticket_registry import TicketRegistry
|
|
17
|
+
|
|
18
|
+
# Import adapters module to trigger registration
|
|
19
|
+
import mcp_ticketer.adapters # noqa: F401
|
|
16
20
|
|
|
17
21
|
# Load environment variables from .env.local
|
|
18
22
|
env_path = Path.cwd() / ".env.local"
|
|
@@ -67,6 +71,7 @@ class Worker:
|
|
|
67
71
|
|
|
68
72
|
"""
|
|
69
73
|
self.queue = queue or Queue()
|
|
74
|
+
self.ticket_registry = TicketRegistry()
|
|
70
75
|
self.running = False
|
|
71
76
|
self.stop_event = threading.Event()
|
|
72
77
|
self.batch_size = batch_size
|
|
@@ -232,6 +237,13 @@ class Worker:
|
|
|
232
237
|
f"Processing queue item {item.id}: {item.operation} on {item.adapter}"
|
|
233
238
|
)
|
|
234
239
|
|
|
240
|
+
# Register operation start in ticket registry
|
|
241
|
+
title = item.ticket_data.get("title", "Unknown")
|
|
242
|
+
self.ticket_registry.register_ticket_operation(
|
|
243
|
+
item.id, item.adapter, item.operation, title, item.ticket_data
|
|
244
|
+
)
|
|
245
|
+
self.ticket_registry.update_ticket_status(item.id, "processing")
|
|
246
|
+
|
|
235
247
|
try:
|
|
236
248
|
# Check rate limit
|
|
237
249
|
await self._check_rate_limit(item.adapter)
|
|
@@ -244,14 +256,34 @@ class Worker:
|
|
|
244
256
|
# Process operation
|
|
245
257
|
result = await self._execute_operation(adapter, item)
|
|
246
258
|
|
|
247
|
-
#
|
|
248
|
-
|
|
259
|
+
# Extract ticket ID from result if available
|
|
260
|
+
ticket_id = None
|
|
261
|
+
if isinstance(result, dict):
|
|
262
|
+
ticket_id = result.get("id")
|
|
263
|
+
|
|
264
|
+
# Mark as completed in both queue and registry (atomic)
|
|
265
|
+
success = self.queue.update_status(
|
|
266
|
+
item.id, QueueStatus.COMPLETED, result=result,
|
|
267
|
+
expected_status=QueueStatus.PROCESSING
|
|
268
|
+
)
|
|
269
|
+
if success:
|
|
270
|
+
self.ticket_registry.update_ticket_status(
|
|
271
|
+
item.id, "completed", ticket_id=ticket_id, result_data=result
|
|
272
|
+
)
|
|
273
|
+
else:
|
|
274
|
+
logger.warning(f"Failed to update status for {item.id} - item may have been processed by another worker")
|
|
275
|
+
|
|
249
276
|
self.stats["items_processed"] += 1
|
|
250
|
-
logger.info(f"Successfully processed {item.id}")
|
|
277
|
+
logger.info(f"Successfully processed {item.id}, ticket ID: {ticket_id}")
|
|
251
278
|
|
|
252
279
|
except Exception as e:
|
|
253
280
|
logger.error(f"Error processing {item.id}: {e}")
|
|
254
281
|
|
|
282
|
+
# Update registry with error
|
|
283
|
+
self.ticket_registry.update_ticket_status(
|
|
284
|
+
item.id, "failed", error_message=str(e), retry_count=item.retry_count
|
|
285
|
+
)
|
|
286
|
+
|
|
255
287
|
# Check retry count
|
|
256
288
|
if item.retry_count < self.MAX_RETRIES:
|
|
257
289
|
# Retry with exponential backoff
|
|
@@ -260,16 +292,31 @@ class Worker:
|
|
|
260
292
|
f"Retrying {item.id} after {retry_delay}s (attempt {item.retry_count + 1}/{self.MAX_RETRIES})"
|
|
261
293
|
)
|
|
262
294
|
|
|
263
|
-
# Increment retry count and reset to pending
|
|
264
|
-
self.queue.increment_retry(
|
|
295
|
+
# Increment retry count and reset to pending (atomic)
|
|
296
|
+
new_retry_count = self.queue.increment_retry(
|
|
297
|
+
item.id, expected_status=QueueStatus.PROCESSING
|
|
298
|
+
)
|
|
299
|
+
if new_retry_count >= 0:
|
|
300
|
+
self.ticket_registry.update_ticket_status(
|
|
301
|
+
item.id, "queued", retry_count=new_retry_count
|
|
302
|
+
)
|
|
303
|
+
else:
|
|
304
|
+
logger.warning(f"Failed to increment retry for {item.id} - item may have been processed by another worker")
|
|
265
305
|
|
|
266
306
|
# Wait before retry
|
|
267
307
|
await asyncio.sleep(retry_delay)
|
|
268
308
|
else:
|
|
269
|
-
# Max retries exceeded, mark as failed
|
|
270
|
-
self.queue.update_status(
|
|
271
|
-
item.id, QueueStatus.FAILED, error_message=str(e)
|
|
309
|
+
# Max retries exceeded, mark as failed (atomic)
|
|
310
|
+
success = self.queue.update_status(
|
|
311
|
+
item.id, QueueStatus.FAILED, error_message=str(e),
|
|
312
|
+
expected_status=QueueStatus.PROCESSING
|
|
272
313
|
)
|
|
314
|
+
if success:
|
|
315
|
+
self.ticket_registry.update_ticket_status(
|
|
316
|
+
item.id, "failed", error_message=str(e), retry_count=item.retry_count
|
|
317
|
+
)
|
|
318
|
+
else:
|
|
319
|
+
logger.warning(f"Failed to mark {item.id} as failed - item may have been processed by another worker")
|
|
273
320
|
self.stats["items_failed"] += 1
|
|
274
321
|
logger.error(f"Max retries exceeded for {item.id}, marking as failed")
|
|
275
322
|
|
|
@@ -387,6 +434,53 @@ class Worker:
|
|
|
387
434
|
await adapter.add_comment(ticket_id, content)
|
|
388
435
|
return {"success": True}
|
|
389
436
|
|
|
437
|
+
# Hierarchy operations
|
|
438
|
+
elif operation == "create_epic":
|
|
439
|
+
result = await adapter.create_epic(
|
|
440
|
+
title=data["title"],
|
|
441
|
+
description=data.get("description"),
|
|
442
|
+
**{k: v for k, v in data.items()
|
|
443
|
+
if k not in ["title", "description"]}
|
|
444
|
+
)
|
|
445
|
+
return {
|
|
446
|
+
"id": result.id if result else None,
|
|
447
|
+
"title": result.title if result else None,
|
|
448
|
+
"type": "epic",
|
|
449
|
+
"success": bool(result)
|
|
450
|
+
}
|
|
451
|
+
|
|
452
|
+
elif operation == "create_issue":
|
|
453
|
+
result = await adapter.create_issue(
|
|
454
|
+
title=data["title"],
|
|
455
|
+
description=data.get("description"),
|
|
456
|
+
epic_id=data.get("epic_id"),
|
|
457
|
+
**{k: v for k, v in data.items()
|
|
458
|
+
if k not in ["title", "description", "epic_id"]}
|
|
459
|
+
)
|
|
460
|
+
return {
|
|
461
|
+
"id": result.id if result else None,
|
|
462
|
+
"title": result.title if result else None,
|
|
463
|
+
"type": "issue",
|
|
464
|
+
"epic_id": data.get("epic_id"),
|
|
465
|
+
"success": bool(result)
|
|
466
|
+
}
|
|
467
|
+
|
|
468
|
+
elif operation == "create_task":
|
|
469
|
+
result = await adapter.create_task(
|
|
470
|
+
title=data["title"],
|
|
471
|
+
parent_id=data["parent_id"],
|
|
472
|
+
description=data.get("description"),
|
|
473
|
+
**{k: v for k, v in data.items()
|
|
474
|
+
if k not in ["title", "parent_id", "description"]}
|
|
475
|
+
)
|
|
476
|
+
return {
|
|
477
|
+
"id": result.id if result else None,
|
|
478
|
+
"title": result.title if result else None,
|
|
479
|
+
"type": "task",
|
|
480
|
+
"parent_id": data["parent_id"],
|
|
481
|
+
"success": bool(result)
|
|
482
|
+
}
|
|
483
|
+
|
|
390
484
|
else:
|
|
391
485
|
raise ValueError(f"Unknown operation: {operation}")
|
|
392
486
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: mcp-ticketer
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.28
|
|
4
4
|
Summary: Universal ticket management interface for AI agents with MCP support
|
|
5
5
|
Author-email: MCP Ticketer Team <support@mcp-ticketer.io>
|
|
6
6
|
Maintainer-email: MCP Ticketer Team <support@mcp-ticketer.io>
|
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
mcp_ticketer/__init__.py,sha256=Xx4WaprO5PXhVPbYi1L6tBmwmJMkYS-lMyG4ieN6QP0,717
|
|
2
|
-
mcp_ticketer/__version__.py,sha256=
|
|
2
|
+
mcp_ticketer/__version__.py,sha256=w-yViL73x_vy42OtylKLgKgQWyIAAbOKsrNKa-cIlJg,1118
|
|
3
3
|
mcp_ticketer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
4
|
mcp_ticketer/adapters/__init__.py,sha256=B5DFllWn23hkhmrLykNO5uMMSdcFuuPHXyLw_jyFzuE,358
|
|
5
|
-
mcp_ticketer/adapters/aitrackdown.py,sha256=
|
|
5
|
+
mcp_ticketer/adapters/aitrackdown.py,sha256=stlbge8K6w-EyQkw_vEQNSXQgCOWN5tOlQUgGWZQNMQ,17936
|
|
6
6
|
mcp_ticketer/adapters/github.py,sha256=X0lEWBCfy-vztX2vauuVSYsOCa9_ezt9hGa5BsCQTu8,46663
|
|
7
|
-
mcp_ticketer/adapters/hybrid.py,sha256=
|
|
7
|
+
mcp_ticketer/adapters/hybrid.py,sha256=UADYZLc_UNw0xHPSbgguBNzvUCnuYn12Qi9ea-zdlMk,19086
|
|
8
8
|
mcp_ticketer/adapters/jira.py,sha256=W2pU-YxrSqgjm1gVt2eGc8We-G0MbRMSggQ2gWkThME,30602
|
|
9
|
-
mcp_ticketer/adapters/linear.py,sha256=
|
|
9
|
+
mcp_ticketer/adapters/linear.py,sha256=0eI8x8pv0isb7RTNmYczefUtaBFKPbOhGnnA4lrUjs8,73369
|
|
10
10
|
mcp_ticketer/cache/__init__.py,sha256=Xcd-cKnt-Cx7jBzvfzUUUPaGkmyXFi5XUFWw3Z4b7d4,138
|
|
11
11
|
mcp_ticketer/cache/memory.py,sha256=2yBqGi9i0SanlUhJoOC7nijWjoMa3_ntPe-V-AV-LfU,5042
|
|
12
12
|
mcp_ticketer/cli/__init__.py,sha256=l9Q8iKmfGkTu0cssHBVqNZTsL4eAtFzOB25AED_0G6g,89
|
|
@@ -15,14 +15,14 @@ mcp_ticketer/cli/codex_configure.py,sha256=xDppHouT6_-cYXswyAggoPX5bSlRXMvCoM_x9
|
|
|
15
15
|
mcp_ticketer/cli/configure.py,sha256=BsA_pSHQMQS0t1bJO_wMM8LWsd5sWJDASjEPRHvwC18,16198
|
|
16
16
|
mcp_ticketer/cli/discover.py,sha256=AF_qlQc1Oo0UkWayoF5pmRChS5J3fJjH6f2YZzd_k8w,13188
|
|
17
17
|
mcp_ticketer/cli/gemini_configure.py,sha256=ZNSA1lBW-itVToza-JxW95Po7daVXKiZAh7lp6pmXMU,9343
|
|
18
|
-
mcp_ticketer/cli/main.py,sha256=
|
|
18
|
+
mcp_ticketer/cli/main.py,sha256=hXPQyeQ9dv5Ry1XxSJZqmanw2KTgN912eXd1dkwd_os,53326
|
|
19
19
|
mcp_ticketer/cli/mcp_configure.py,sha256=RzV50UjXgOmvMp-9S0zS39psuvjffVByaMrqrUaAGAM,9594
|
|
20
20
|
mcp_ticketer/cli/migrate_config.py,sha256=MYsr_C5ZxsGg0P13etWTWNrJ_lc6ElRCkzfQADYr3DM,5956
|
|
21
21
|
mcp_ticketer/cli/queue_commands.py,sha256=mm-3H6jmkUGJDyU_E46o9iRpek8tvFCm77F19OtHiZI,7884
|
|
22
22
|
mcp_ticketer/cli/utils.py,sha256=2ptUrp2ELZsox0kSxAI5DFrHonOU999qh4MxbLv6VBQ,21155
|
|
23
23
|
mcp_ticketer/core/__init__.py,sha256=eXovsaJymQRP2AwOBuOy6mFtI3I68D7gGenZ5V-IMqo,349
|
|
24
24
|
mcp_ticketer/core/adapter.py,sha256=q64LxOInIno7EIbmuxItf8KEsd-g9grCs__Z4uwZHto,10273
|
|
25
|
-
mcp_ticketer/core/config.py,sha256=
|
|
25
|
+
mcp_ticketer/core/config.py,sha256=aC1MAV0nghmkB6BnAEP3aa4DwLob9q6U04eP5xmQx0Y,15181
|
|
26
26
|
mcp_ticketer/core/env_discovery.py,sha256=wKp2Pi5vQMGOTrM1690IBv_eoABly-pD8ah7n1zSWDc,17710
|
|
27
27
|
mcp_ticketer/core/http_client.py,sha256=s5ikMiwEJ8TJjNn73wu3gv3OdAtyBEpAqPnSroRMW2k,13971
|
|
28
28
|
mcp_ticketer/core/mappers.py,sha256=1aG1jFsHTCwmGRVgOlXW-VOSTGzc86gv7qjDfiR1ups,17462
|
|
@@ -30,16 +30,18 @@ mcp_ticketer/core/models.py,sha256=DRuJoYbjp9fcPV9GwQfhVcNUB0XmwQB3vuqW8hQWZ_k,6
|
|
|
30
30
|
mcp_ticketer/core/project_config.py,sha256=yYxlgxjcEPeOwx-b-SXFpe0k9pW9xzBRAK72PsItG-o,23346
|
|
31
31
|
mcp_ticketer/core/registry.py,sha256=ShYLDPE62KFJpB0kj_zFyQzRxSH3LkQEEuo1jaakb1k,3483
|
|
32
32
|
mcp_ticketer/mcp/__init__.py,sha256=Y05eTzsPk0wH8yKNIM-ekpGjgSDO0bQr0EME-vOP4GE,123
|
|
33
|
-
mcp_ticketer/mcp/server.py,sha256=
|
|
33
|
+
mcp_ticketer/mcp/server.py,sha256=PpENqLi9qdhxT1KTYrjkekT1LWP2mfTZY-PF6la1hs4,68078
|
|
34
34
|
mcp_ticketer/queue/__init__.py,sha256=1YIaCpZpFqPcqvDEQXiEvDLiw94DXRdCJkBaVIFQrms,231
|
|
35
35
|
mcp_ticketer/queue/__main__.py,sha256=gc_tE9NUdK07OJfTZuD4t6KeBD_vxFQIhknGTQUG_jk,109
|
|
36
|
+
mcp_ticketer/queue/health_monitor.py,sha256=aQrlBzfbLWu8-fV2b5CuHs4oqyTqGGcntKIHM3r-dDI,11844
|
|
36
37
|
mcp_ticketer/queue/manager.py,sha256=qqUqq_JtH8jfg-MDfc-UIgFaa7gYsA1eBaR2KsCw48c,7513
|
|
37
|
-
mcp_ticketer/queue/queue.py,sha256=
|
|
38
|
+
mcp_ticketer/queue/queue.py,sha256=jSAkYNEIbNH1cbYuF8s6eFuZmXqn8WHXx3mbfMU2Ud8,17131
|
|
38
39
|
mcp_ticketer/queue/run_worker.py,sha256=_IBezjvhbJJ7gn0evTBIMbSPjvfFZwxEdT-1DLo_bRk,799
|
|
39
|
-
mcp_ticketer/queue/
|
|
40
|
-
mcp_ticketer
|
|
41
|
-
mcp_ticketer-0.1.
|
|
42
|
-
mcp_ticketer-0.1.
|
|
43
|
-
mcp_ticketer-0.1.
|
|
44
|
-
mcp_ticketer-0.1.
|
|
45
|
-
mcp_ticketer-0.1.
|
|
40
|
+
mcp_ticketer/queue/ticket_registry.py,sha256=k8FYg2cFYsI4POb94-o-fTrIVr-ttfi60r0O5YhJYck,15321
|
|
41
|
+
mcp_ticketer/queue/worker.py,sha256=TLXXXTAQT1k9Oiw2WjSd8bzT3rr8TQ8NLt9JBovGQEA,18679
|
|
42
|
+
mcp_ticketer-0.1.28.dist-info/licenses/LICENSE,sha256=KOVrunjtILSzY-2N8Lqa3-Q8dMaZIG4LrlLTr9UqL08,1073
|
|
43
|
+
mcp_ticketer-0.1.28.dist-info/METADATA,sha256=fJ7LnYE7qITkq4JVxoYUmNZavf9K48EhBPVshcAUOQs,13191
|
|
44
|
+
mcp_ticketer-0.1.28.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
45
|
+
mcp_ticketer-0.1.28.dist-info/entry_points.txt,sha256=o1IxVhnHnBNG7FZzbFq-Whcs1Djbofs0qMjiUYBLx2s,60
|
|
46
|
+
mcp_ticketer-0.1.28.dist-info/top_level.txt,sha256=WnAG4SOT1Vm9tIwl70AbGG_nA217YyV3aWFhxLH2rxw,13
|
|
47
|
+
mcp_ticketer-0.1.28.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|