mcp-ticketer 0.2.0__py3-none-any.whl → 2.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +930 -52
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1537 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +58 -16
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/__init__.py +1 -1
- mcp_ticketer/adapters/linear/adapter.py +3810 -462
- mcp_ticketer/adapters/linear/client.py +312 -69
- mcp_ticketer/adapters/linear/mappers.py +305 -85
- mcp_ticketer/adapters/linear/queries.py +317 -17
- mcp_ticketer/adapters/linear/types.py +187 -64
- mcp_ticketer/adapters/linear.py +2 -2
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +421 -0
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +1323 -151
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +209 -114
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +256 -130
- mcp_ticketer/cli/main.py +140 -1284
- mcp_ticketer/cli/mcp_configure.py +1013 -100
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +794 -0
- mcp_ticketer/cli/simple_health.py +84 -59
- mcp_ticketer/cli/ticket_commands.py +1375 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +195 -72
- mcp_ticketer/core/__init__.py +64 -1
- mcp_ticketer/core/adapter.py +618 -18
- mcp_ticketer/core/config.py +77 -68
- mcp_ticketer/core/env_discovery.py +75 -16
- mcp_ticketer/core/env_loader.py +121 -97
- mcp_ticketer/core/exceptions.py +32 -24
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +566 -19
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +189 -49
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +69 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +224 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +330 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1564 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +150 -0
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +318 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1413 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +78 -63
- mcp_ticketer/queue/queue.py +108 -21
- mcp_ticketer/queue/run_worker.py +2 -2
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +96 -58
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.9.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.9.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.9.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1354
- mcp_ticketer/adapters/jira.py +0 -1011
- mcp_ticketer/mcp/server.py +0 -1895
- mcp_ticketer-0.2.0.dist-info/METADATA +0 -414
- mcp_ticketer-0.2.0.dist-info/RECORD +0 -58
- mcp_ticketer-0.2.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.2.0.dist-info → mcp_ticketer-2.2.9.dist-info}/licenses/LICENSE +0 -0
mcp_ticketer/mcp/server.py
DELETED
|
@@ -1,1895 +0,0 @@
|
|
|
1
|
-
"""MCP JSON-RPC server for ticket management."""
|
|
2
|
-
|
|
3
|
-
import asyncio
|
|
4
|
-
import json
|
|
5
|
-
import sys
|
|
6
|
-
from pathlib import Path
|
|
7
|
-
from typing import Any, Optional
|
|
8
|
-
|
|
9
|
-
from dotenv import load_dotenv
|
|
10
|
-
|
|
11
|
-
from ..core import AdapterRegistry
|
|
12
|
-
from ..core.models import SearchQuery
|
|
13
|
-
from ..queue import Queue, QueueStatus, WorkerManager
|
|
14
|
-
from ..queue.health_monitor import QueueHealthMonitor, HealthStatus
|
|
15
|
-
|
|
16
|
-
# Import adapters module to trigger registration
|
|
17
|
-
import mcp_ticketer.adapters # noqa: F401
|
|
18
|
-
|
|
19
|
-
# Load environment variables early (prioritize .env.local)
|
|
20
|
-
# Check for .env.local first (takes precedence)
|
|
21
|
-
env_local_file = Path.cwd() / ".env.local"
|
|
22
|
-
if env_local_file.exists():
|
|
23
|
-
load_dotenv(env_local_file, override=True)
|
|
24
|
-
sys.stderr.write(f"[MCP Server] Loaded environment from: {env_local_file}\n")
|
|
25
|
-
else:
|
|
26
|
-
# Fall back to .env
|
|
27
|
-
env_file = Path.cwd() / ".env"
|
|
28
|
-
if env_file.exists():
|
|
29
|
-
load_dotenv(env_file, override=True)
|
|
30
|
-
sys.stderr.write(f"[MCP Server] Loaded environment from: {env_file}\n")
|
|
31
|
-
else:
|
|
32
|
-
# Try default dotenv loading (searches upward)
|
|
33
|
-
load_dotenv(override=True)
|
|
34
|
-
sys.stderr.write("[MCP Server] Loaded environment from default search path\n")
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
class MCPTicketServer:
|
|
38
|
-
"""MCP server for ticket operations over stdio."""
|
|
39
|
-
|
|
40
|
-
def __init__(
|
|
41
|
-
self, adapter_type: str = "aitrackdown", config: Optional[dict[str, Any]] = None
|
|
42
|
-
):
|
|
43
|
-
"""Initialize MCP server.
|
|
44
|
-
|
|
45
|
-
Args:
|
|
46
|
-
adapter_type: Type of adapter to use
|
|
47
|
-
config: Adapter configuration
|
|
48
|
-
|
|
49
|
-
"""
|
|
50
|
-
self.adapter = AdapterRegistry.get_adapter(
|
|
51
|
-
adapter_type, config or {"base_path": ".aitrackdown"}
|
|
52
|
-
)
|
|
53
|
-
self.running = False
|
|
54
|
-
|
|
55
|
-
async def handle_request(self, request: dict[str, Any]) -> dict[str, Any]:
|
|
56
|
-
"""Handle JSON-RPC request.
|
|
57
|
-
|
|
58
|
-
Args:
|
|
59
|
-
request: JSON-RPC request
|
|
60
|
-
|
|
61
|
-
Returns:
|
|
62
|
-
JSON-RPC response
|
|
63
|
-
|
|
64
|
-
"""
|
|
65
|
-
method = request.get("method")
|
|
66
|
-
params = request.get("params", {})
|
|
67
|
-
request_id = request.get("id")
|
|
68
|
-
|
|
69
|
-
try:
|
|
70
|
-
# Handle MCP protocol methods
|
|
71
|
-
if method == "initialize":
|
|
72
|
-
result = await self._handle_initialize(params)
|
|
73
|
-
# Route to ticket operation handlers
|
|
74
|
-
elif method == "ticket/create":
|
|
75
|
-
result = await self._handle_create(params)
|
|
76
|
-
elif method == "ticket/read":
|
|
77
|
-
result = await self._handle_read(params)
|
|
78
|
-
elif method == "ticket/update":
|
|
79
|
-
result = await self._handle_update(params)
|
|
80
|
-
elif method == "ticket/delete":
|
|
81
|
-
result = await self._handle_delete(params)
|
|
82
|
-
elif method == "ticket/list":
|
|
83
|
-
result = await self._handle_list(params)
|
|
84
|
-
elif method == "ticket/search":
|
|
85
|
-
result = await self._handle_search(params)
|
|
86
|
-
elif method == "ticket/transition":
|
|
87
|
-
result = await self._handle_transition(params)
|
|
88
|
-
elif method == "ticket/comment":
|
|
89
|
-
result = await self._handle_comment(params)
|
|
90
|
-
elif method == "ticket/status":
|
|
91
|
-
result = await self._handle_queue_status(params)
|
|
92
|
-
elif method == "ticket/create_pr":
|
|
93
|
-
result = await self._handle_create_pr(params)
|
|
94
|
-
elif method == "ticket/link_pr":
|
|
95
|
-
result = await self._handle_link_pr(params)
|
|
96
|
-
elif method == "queue/health":
|
|
97
|
-
result = await self._handle_queue_health(params)
|
|
98
|
-
# Hierarchy management tools
|
|
99
|
-
elif method == "epic/create":
|
|
100
|
-
result = await self._handle_epic_create(params)
|
|
101
|
-
elif method == "epic/list":
|
|
102
|
-
result = await self._handle_epic_list(params)
|
|
103
|
-
elif method == "epic/issues":
|
|
104
|
-
result = await self._handle_epic_issues(params)
|
|
105
|
-
elif method == "issue/create":
|
|
106
|
-
result = await self._handle_issue_create(params)
|
|
107
|
-
elif method == "issue/tasks":
|
|
108
|
-
result = await self._handle_issue_tasks(params)
|
|
109
|
-
elif method == "task/create":
|
|
110
|
-
result = await self._handle_task_create(params)
|
|
111
|
-
elif method == "hierarchy/tree":
|
|
112
|
-
result = await self._handle_hierarchy_tree(params)
|
|
113
|
-
# Bulk operations
|
|
114
|
-
elif method == "ticket/bulk_create":
|
|
115
|
-
result = await self._handle_bulk_create(params)
|
|
116
|
-
elif method == "ticket/bulk_update":
|
|
117
|
-
result = await self._handle_bulk_update(params)
|
|
118
|
-
# Advanced search
|
|
119
|
-
elif method == "ticket/search_hierarchy":
|
|
120
|
-
result = await self._handle_search_hierarchy(params)
|
|
121
|
-
# Attachment handling
|
|
122
|
-
elif method == "ticket/attach":
|
|
123
|
-
result = await self._handle_attach(params)
|
|
124
|
-
elif method == "ticket/attachments":
|
|
125
|
-
result = await self._handle_list_attachments(params)
|
|
126
|
-
elif method == "tools/list":
|
|
127
|
-
result = await self._handle_tools_list()
|
|
128
|
-
elif method == "tools/call":
|
|
129
|
-
result = await self._handle_tools_call(params)
|
|
130
|
-
else:
|
|
131
|
-
return self._error_response(
|
|
132
|
-
request_id, -32601, f"Method not found: {method}"
|
|
133
|
-
)
|
|
134
|
-
|
|
135
|
-
return {"jsonrpc": "2.0", "result": result, "id": request_id}
|
|
136
|
-
|
|
137
|
-
except Exception as e:
|
|
138
|
-
return self._error_response(request_id, -32603, f"Internal error: {str(e)}")
|
|
139
|
-
|
|
140
|
-
def _error_response(
|
|
141
|
-
self, request_id: Any, code: int, message: str
|
|
142
|
-
) -> dict[str, Any]:
|
|
143
|
-
"""Create error response.
|
|
144
|
-
|
|
145
|
-
Args:
|
|
146
|
-
request_id: Request ID
|
|
147
|
-
code: Error code
|
|
148
|
-
message: Error message
|
|
149
|
-
|
|
150
|
-
Returns:
|
|
151
|
-
Error response
|
|
152
|
-
|
|
153
|
-
"""
|
|
154
|
-
return {
|
|
155
|
-
"jsonrpc": "2.0",
|
|
156
|
-
"error": {"code": code, "message": message},
|
|
157
|
-
"id": request_id,
|
|
158
|
-
}
|
|
159
|
-
|
|
160
|
-
async def _handle_create(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
161
|
-
"""Handle ticket creation."""
|
|
162
|
-
# Check queue health before proceeding
|
|
163
|
-
health_monitor = QueueHealthMonitor()
|
|
164
|
-
health = health_monitor.check_health()
|
|
165
|
-
|
|
166
|
-
# If queue is in critical state, try auto-repair
|
|
167
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
168
|
-
repair_result = health_monitor.auto_repair()
|
|
169
|
-
# Re-check health after repair
|
|
170
|
-
health = health_monitor.check_health()
|
|
171
|
-
|
|
172
|
-
# If still critical, return error immediately
|
|
173
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
174
|
-
critical_alerts = [alert for alert in health["alerts"] if alert["level"] == "critical"]
|
|
175
|
-
return {
|
|
176
|
-
"status": "error",
|
|
177
|
-
"error": "Queue system is in critical state",
|
|
178
|
-
"details": {
|
|
179
|
-
"health_status": health["status"],
|
|
180
|
-
"critical_issues": critical_alerts,
|
|
181
|
-
"repair_attempted": repair_result["actions_taken"]
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
|
|
185
|
-
# Queue the operation
|
|
186
|
-
queue = Queue()
|
|
187
|
-
task_data = {
|
|
188
|
-
"title": params["title"],
|
|
189
|
-
"description": params.get("description"),
|
|
190
|
-
"priority": params.get("priority", "medium"),
|
|
191
|
-
"tags": params.get("tags", []),
|
|
192
|
-
"assignee": params.get("assignee"),
|
|
193
|
-
}
|
|
194
|
-
|
|
195
|
-
queue_id = queue.add(
|
|
196
|
-
ticket_data=task_data,
|
|
197
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
198
|
-
operation="create",
|
|
199
|
-
)
|
|
200
|
-
|
|
201
|
-
# Start worker if needed
|
|
202
|
-
manager = WorkerManager()
|
|
203
|
-
worker_started = manager.start_if_needed()
|
|
204
|
-
|
|
205
|
-
# If worker failed to start and we have pending items, that's critical
|
|
206
|
-
if not worker_started and queue.get_pending_count() > 0:
|
|
207
|
-
return {
|
|
208
|
-
"status": "error",
|
|
209
|
-
"error": "Failed to start worker process",
|
|
210
|
-
"queue_id": queue_id,
|
|
211
|
-
"details": {
|
|
212
|
-
"pending_count": queue.get_pending_count(),
|
|
213
|
-
"action": "Worker process could not be started to process queued operations"
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
|
|
217
|
-
# Check if async mode is requested (for backward compatibility)
|
|
218
|
-
if params.get("async_mode", False):
|
|
219
|
-
return {
|
|
220
|
-
"queue_id": queue_id,
|
|
221
|
-
"status": "queued",
|
|
222
|
-
"message": f"Ticket creation queued with ID: {queue_id}",
|
|
223
|
-
}
|
|
224
|
-
|
|
225
|
-
# Poll for completion with timeout (default synchronous behavior)
|
|
226
|
-
max_wait_time = params.get("timeout", 30) # seconds, allow override
|
|
227
|
-
poll_interval = 0.5 # seconds
|
|
228
|
-
start_time = asyncio.get_event_loop().time()
|
|
229
|
-
|
|
230
|
-
while True:
|
|
231
|
-
# Check queue status
|
|
232
|
-
item = queue.get_item(queue_id)
|
|
233
|
-
|
|
234
|
-
if not item:
|
|
235
|
-
return {
|
|
236
|
-
"queue_id": queue_id,
|
|
237
|
-
"status": "error",
|
|
238
|
-
"error": f"Queue item {queue_id} not found",
|
|
239
|
-
}
|
|
240
|
-
|
|
241
|
-
# If completed, return with ticket ID
|
|
242
|
-
if item.status == QueueStatus.COMPLETED:
|
|
243
|
-
response = {
|
|
244
|
-
"queue_id": queue_id,
|
|
245
|
-
"status": "completed",
|
|
246
|
-
"title": params["title"],
|
|
247
|
-
}
|
|
248
|
-
|
|
249
|
-
# Add ticket ID and other result data if available
|
|
250
|
-
if item.result:
|
|
251
|
-
response["ticket_id"] = item.result.get("id")
|
|
252
|
-
if "state" in item.result:
|
|
253
|
-
response["state"] = item.result["state"]
|
|
254
|
-
# Try to construct URL if we have enough information
|
|
255
|
-
if response.get("ticket_id"):
|
|
256
|
-
# This is adapter-specific, but we can add URL generation later
|
|
257
|
-
response["id"] = response[
|
|
258
|
-
"ticket_id"
|
|
259
|
-
] # Also include as "id" for compatibility
|
|
260
|
-
|
|
261
|
-
response["message"] = (
|
|
262
|
-
f"Ticket created successfully: {response.get('ticket_id', queue_id)}"
|
|
263
|
-
)
|
|
264
|
-
return response
|
|
265
|
-
|
|
266
|
-
# If failed, return error
|
|
267
|
-
if item.status == QueueStatus.FAILED:
|
|
268
|
-
return {
|
|
269
|
-
"queue_id": queue_id,
|
|
270
|
-
"status": "failed",
|
|
271
|
-
"error": item.error_message or "Ticket creation failed",
|
|
272
|
-
"title": params["title"],
|
|
273
|
-
}
|
|
274
|
-
|
|
275
|
-
# Check timeout
|
|
276
|
-
elapsed = asyncio.get_event_loop().time() - start_time
|
|
277
|
-
if elapsed > max_wait_time:
|
|
278
|
-
return {
|
|
279
|
-
"queue_id": queue_id,
|
|
280
|
-
"status": "timeout",
|
|
281
|
-
"message": f"Ticket creation timed out after {max_wait_time} seconds. Use ticket_status with queue_id to check status.",
|
|
282
|
-
"title": params["title"],
|
|
283
|
-
}
|
|
284
|
-
|
|
285
|
-
# Wait before next poll
|
|
286
|
-
await asyncio.sleep(poll_interval)
|
|
287
|
-
|
|
288
|
-
async def _handle_read(self, params: dict[str, Any]) -> Optional[dict[str, Any]]:
|
|
289
|
-
"""Handle ticket read."""
|
|
290
|
-
ticket = await self.adapter.read(params["ticket_id"])
|
|
291
|
-
return ticket.model_dump() if ticket else None
|
|
292
|
-
|
|
293
|
-
async def _handle_update(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
294
|
-
"""Handle ticket update."""
|
|
295
|
-
# Queue the operation
|
|
296
|
-
queue = Queue()
|
|
297
|
-
updates = params.get("updates", {})
|
|
298
|
-
updates["ticket_id"] = params["ticket_id"]
|
|
299
|
-
|
|
300
|
-
queue_id = queue.add(
|
|
301
|
-
ticket_data=updates,
|
|
302
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
303
|
-
operation="update",
|
|
304
|
-
)
|
|
305
|
-
|
|
306
|
-
# Start worker if needed
|
|
307
|
-
manager = WorkerManager()
|
|
308
|
-
manager.start_if_needed()
|
|
309
|
-
|
|
310
|
-
# Poll for completion with timeout
|
|
311
|
-
max_wait_time = 30 # seconds
|
|
312
|
-
poll_interval = 0.5 # seconds
|
|
313
|
-
start_time = asyncio.get_event_loop().time()
|
|
314
|
-
|
|
315
|
-
while True:
|
|
316
|
-
# Check queue status
|
|
317
|
-
item = queue.get_item(queue_id)
|
|
318
|
-
|
|
319
|
-
if not item:
|
|
320
|
-
return {
|
|
321
|
-
"queue_id": queue_id,
|
|
322
|
-
"status": "error",
|
|
323
|
-
"error": f"Queue item {queue_id} not found",
|
|
324
|
-
}
|
|
325
|
-
|
|
326
|
-
# If completed, return with ticket ID
|
|
327
|
-
if item.status == QueueStatus.COMPLETED:
|
|
328
|
-
response = {
|
|
329
|
-
"queue_id": queue_id,
|
|
330
|
-
"status": "completed",
|
|
331
|
-
"ticket_id": params["ticket_id"],
|
|
332
|
-
}
|
|
333
|
-
|
|
334
|
-
# Add result data if available
|
|
335
|
-
if item.result:
|
|
336
|
-
if item.result.get("id"):
|
|
337
|
-
response["ticket_id"] = item.result["id"]
|
|
338
|
-
response["success"] = item.result.get("success", True)
|
|
339
|
-
|
|
340
|
-
response["message"] = (
|
|
341
|
-
f"Ticket updated successfully: {response['ticket_id']}"
|
|
342
|
-
)
|
|
343
|
-
return response
|
|
344
|
-
|
|
345
|
-
# If failed, return error
|
|
346
|
-
if item.status == QueueStatus.FAILED:
|
|
347
|
-
return {
|
|
348
|
-
"queue_id": queue_id,
|
|
349
|
-
"status": "failed",
|
|
350
|
-
"error": item.error_message or "Ticket update failed",
|
|
351
|
-
"ticket_id": params["ticket_id"],
|
|
352
|
-
}
|
|
353
|
-
|
|
354
|
-
# Check timeout
|
|
355
|
-
elapsed = asyncio.get_event_loop().time() - start_time
|
|
356
|
-
if elapsed > max_wait_time:
|
|
357
|
-
return {
|
|
358
|
-
"queue_id": queue_id,
|
|
359
|
-
"status": "timeout",
|
|
360
|
-
"message": f"Ticket update timed out after {max_wait_time} seconds. Use ticket_status with queue_id to check status.",
|
|
361
|
-
"ticket_id": params["ticket_id"],
|
|
362
|
-
}
|
|
363
|
-
|
|
364
|
-
# Wait before next poll
|
|
365
|
-
await asyncio.sleep(poll_interval)
|
|
366
|
-
|
|
367
|
-
async def _handle_delete(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
368
|
-
"""Handle ticket deletion."""
|
|
369
|
-
# Queue the operation
|
|
370
|
-
queue = Queue()
|
|
371
|
-
queue_id = queue.add(
|
|
372
|
-
ticket_data={"ticket_id": params["ticket_id"]},
|
|
373
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
374
|
-
operation="delete",
|
|
375
|
-
)
|
|
376
|
-
|
|
377
|
-
# Start worker if needed
|
|
378
|
-
manager = WorkerManager()
|
|
379
|
-
manager.start_if_needed()
|
|
380
|
-
|
|
381
|
-
return {
|
|
382
|
-
"queue_id": queue_id,
|
|
383
|
-
"status": "queued",
|
|
384
|
-
"message": f"Ticket deletion queued with ID: {queue_id}",
|
|
385
|
-
}
|
|
386
|
-
|
|
387
|
-
async def _handle_list(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
388
|
-
"""Handle ticket listing."""
|
|
389
|
-
tickets = await self.adapter.list(
|
|
390
|
-
limit=params.get("limit", 10),
|
|
391
|
-
offset=params.get("offset", 0),
|
|
392
|
-
filters=params.get("filters"),
|
|
393
|
-
)
|
|
394
|
-
return [ticket.model_dump() for ticket in tickets]
|
|
395
|
-
|
|
396
|
-
async def _handle_search(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
397
|
-
"""Handle ticket search."""
|
|
398
|
-
query = SearchQuery(**params)
|
|
399
|
-
tickets = await self.adapter.search(query)
|
|
400
|
-
return [ticket.model_dump() for ticket in tickets]
|
|
401
|
-
|
|
402
|
-
async def _handle_transition(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
403
|
-
"""Handle state transition."""
|
|
404
|
-
# Queue the operation
|
|
405
|
-
queue = Queue()
|
|
406
|
-
queue_id = queue.add(
|
|
407
|
-
ticket_data={
|
|
408
|
-
"ticket_id": params["ticket_id"],
|
|
409
|
-
"state": params["target_state"],
|
|
410
|
-
},
|
|
411
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
412
|
-
operation="transition",
|
|
413
|
-
)
|
|
414
|
-
|
|
415
|
-
# Start worker if needed
|
|
416
|
-
manager = WorkerManager()
|
|
417
|
-
manager.start_if_needed()
|
|
418
|
-
|
|
419
|
-
# Poll for completion with timeout
|
|
420
|
-
max_wait_time = 30 # seconds
|
|
421
|
-
poll_interval = 0.5 # seconds
|
|
422
|
-
start_time = asyncio.get_event_loop().time()
|
|
423
|
-
|
|
424
|
-
while True:
|
|
425
|
-
# Check queue status
|
|
426
|
-
item = queue.get_item(queue_id)
|
|
427
|
-
|
|
428
|
-
if not item:
|
|
429
|
-
return {
|
|
430
|
-
"queue_id": queue_id,
|
|
431
|
-
"status": "error",
|
|
432
|
-
"error": f"Queue item {queue_id} not found",
|
|
433
|
-
}
|
|
434
|
-
|
|
435
|
-
# If completed, return with ticket ID
|
|
436
|
-
if item.status == QueueStatus.COMPLETED:
|
|
437
|
-
response = {
|
|
438
|
-
"queue_id": queue_id,
|
|
439
|
-
"status": "completed",
|
|
440
|
-
"ticket_id": params["ticket_id"],
|
|
441
|
-
"state": params["target_state"],
|
|
442
|
-
}
|
|
443
|
-
|
|
444
|
-
# Add result data if available
|
|
445
|
-
if item.result:
|
|
446
|
-
if item.result.get("id"):
|
|
447
|
-
response["ticket_id"] = item.result["id"]
|
|
448
|
-
response["success"] = item.result.get("success", True)
|
|
449
|
-
|
|
450
|
-
response["message"] = (
|
|
451
|
-
f"State transition completed successfully: {response['ticket_id']} → {params['target_state']}"
|
|
452
|
-
)
|
|
453
|
-
return response
|
|
454
|
-
|
|
455
|
-
# If failed, return error
|
|
456
|
-
if item.status == QueueStatus.FAILED:
|
|
457
|
-
return {
|
|
458
|
-
"queue_id": queue_id,
|
|
459
|
-
"status": "failed",
|
|
460
|
-
"error": item.error_message or "State transition failed",
|
|
461
|
-
"ticket_id": params["ticket_id"],
|
|
462
|
-
}
|
|
463
|
-
|
|
464
|
-
# Check timeout
|
|
465
|
-
elapsed = asyncio.get_event_loop().time() - start_time
|
|
466
|
-
if elapsed > max_wait_time:
|
|
467
|
-
return {
|
|
468
|
-
"queue_id": queue_id,
|
|
469
|
-
"status": "timeout",
|
|
470
|
-
"message": f"State transition timed out after {max_wait_time} seconds. Use ticket_status with queue_id to check status.",
|
|
471
|
-
"ticket_id": params["ticket_id"],
|
|
472
|
-
}
|
|
473
|
-
|
|
474
|
-
# Wait before next poll
|
|
475
|
-
await asyncio.sleep(poll_interval)
|
|
476
|
-
|
|
477
|
-
async def _handle_comment(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
478
|
-
"""Handle comment operations."""
|
|
479
|
-
operation = params.get("operation", "add")
|
|
480
|
-
|
|
481
|
-
if operation == "add":
|
|
482
|
-
# Queue the comment addition
|
|
483
|
-
queue = Queue()
|
|
484
|
-
queue_id = queue.add(
|
|
485
|
-
ticket_data={
|
|
486
|
-
"ticket_id": params["ticket_id"],
|
|
487
|
-
"content": params["content"],
|
|
488
|
-
"author": params.get("author"),
|
|
489
|
-
},
|
|
490
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
491
|
-
operation="comment",
|
|
492
|
-
)
|
|
493
|
-
|
|
494
|
-
# Start worker if needed
|
|
495
|
-
manager = WorkerManager()
|
|
496
|
-
manager.start_if_needed()
|
|
497
|
-
|
|
498
|
-
return {
|
|
499
|
-
"queue_id": queue_id,
|
|
500
|
-
"status": "queued",
|
|
501
|
-
"message": f"Comment addition queued with ID: {queue_id}",
|
|
502
|
-
}
|
|
503
|
-
|
|
504
|
-
elif operation == "list":
|
|
505
|
-
# Comments list is read-only, execute directly
|
|
506
|
-
comments = await self.adapter.get_comments(
|
|
507
|
-
params["ticket_id"],
|
|
508
|
-
limit=params.get("limit", 10),
|
|
509
|
-
offset=params.get("offset", 0),
|
|
510
|
-
)
|
|
511
|
-
return [comment.model_dump() for comment in comments]
|
|
512
|
-
|
|
513
|
-
else:
|
|
514
|
-
raise ValueError(f"Unknown comment operation: {operation}")
|
|
515
|
-
|
|
516
|
-
async def _handle_queue_status(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
517
|
-
"""Check status of queued operation."""
|
|
518
|
-
queue_id = params.get("queue_id")
|
|
519
|
-
if not queue_id:
|
|
520
|
-
raise ValueError("queue_id is required")
|
|
521
|
-
|
|
522
|
-
queue = Queue()
|
|
523
|
-
item = queue.get_item(queue_id)
|
|
524
|
-
|
|
525
|
-
if not item:
|
|
526
|
-
return {"error": f"Queue item not found: {queue_id}"}
|
|
527
|
-
|
|
528
|
-
response = {
|
|
529
|
-
"queue_id": item.id,
|
|
530
|
-
"status": item.status.value,
|
|
531
|
-
"operation": item.operation,
|
|
532
|
-
"created_at": item.created_at.isoformat(),
|
|
533
|
-
"retry_count": item.retry_count,
|
|
534
|
-
}
|
|
535
|
-
|
|
536
|
-
if item.processed_at:
|
|
537
|
-
response["processed_at"] = item.processed_at.isoformat()
|
|
538
|
-
|
|
539
|
-
if item.error_message:
|
|
540
|
-
response["error"] = item.error_message
|
|
541
|
-
|
|
542
|
-
if item.result:
|
|
543
|
-
response["result"] = item.result
|
|
544
|
-
|
|
545
|
-
return response
|
|
546
|
-
|
|
547
|
-
async def _handle_queue_health(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
548
|
-
"""Handle queue health check."""
|
|
549
|
-
health_monitor = QueueHealthMonitor()
|
|
550
|
-
health = health_monitor.check_health()
|
|
551
|
-
|
|
552
|
-
# Add auto-repair option
|
|
553
|
-
auto_repair = params.get("auto_repair", False)
|
|
554
|
-
if auto_repair and health["status"] in [HealthStatus.CRITICAL, HealthStatus.WARNING]:
|
|
555
|
-
repair_result = health_monitor.auto_repair()
|
|
556
|
-
health["auto_repair"] = repair_result
|
|
557
|
-
# Re-check health after repair
|
|
558
|
-
health.update(health_monitor.check_health())
|
|
559
|
-
|
|
560
|
-
return health
|
|
561
|
-
|
|
562
|
-
# Hierarchy Management Handlers
|
|
563
|
-
|
|
564
|
-
async def _handle_epic_create(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
565
|
-
"""Handle epic creation."""
|
|
566
|
-
# Check queue health before proceeding
|
|
567
|
-
health_monitor = QueueHealthMonitor()
|
|
568
|
-
health = health_monitor.check_health()
|
|
569
|
-
|
|
570
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
571
|
-
repair_result = health_monitor.auto_repair()
|
|
572
|
-
health = health_monitor.check_health()
|
|
573
|
-
|
|
574
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
575
|
-
critical_alerts = [alert for alert in health["alerts"] if alert["level"] == "critical"]
|
|
576
|
-
return {
|
|
577
|
-
"status": "error",
|
|
578
|
-
"error": "Queue system is in critical state",
|
|
579
|
-
"details": {
|
|
580
|
-
"health_status": health["status"],
|
|
581
|
-
"critical_issues": critical_alerts,
|
|
582
|
-
"repair_attempted": repair_result["actions_taken"]
|
|
583
|
-
}
|
|
584
|
-
}
|
|
585
|
-
|
|
586
|
-
# Queue the epic creation
|
|
587
|
-
queue = Queue()
|
|
588
|
-
epic_data = {
|
|
589
|
-
"title": params["title"],
|
|
590
|
-
"description": params.get("description"),
|
|
591
|
-
"child_issues": params.get("child_issues", []),
|
|
592
|
-
"target_date": params.get("target_date"),
|
|
593
|
-
"lead_id": params.get("lead_id"),
|
|
594
|
-
}
|
|
595
|
-
|
|
596
|
-
queue_id = queue.add(
|
|
597
|
-
ticket_data=epic_data,
|
|
598
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
599
|
-
operation="create_epic",
|
|
600
|
-
)
|
|
601
|
-
|
|
602
|
-
# Start worker if needed
|
|
603
|
-
manager = WorkerManager()
|
|
604
|
-
worker_started = manager.start_if_needed()
|
|
605
|
-
|
|
606
|
-
if not worker_started and queue.get_pending_count() > 0:
|
|
607
|
-
return {
|
|
608
|
-
"status": "error",
|
|
609
|
-
"error": "Failed to start worker process",
|
|
610
|
-
"queue_id": queue_id,
|
|
611
|
-
"details": {
|
|
612
|
-
"pending_count": queue.get_pending_count(),
|
|
613
|
-
"action": "Worker process could not be started to process queued operations"
|
|
614
|
-
}
|
|
615
|
-
}
|
|
616
|
-
|
|
617
|
-
return {
|
|
618
|
-
"queue_id": queue_id,
|
|
619
|
-
"status": "queued",
|
|
620
|
-
"message": f"Epic creation queued with ID: {queue_id}",
|
|
621
|
-
"epic_data": epic_data
|
|
622
|
-
}
|
|
623
|
-
|
|
624
|
-
async def _handle_epic_list(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
625
|
-
"""Handle epic listing."""
|
|
626
|
-
epics = await self.adapter.list_epics(
|
|
627
|
-
limit=params.get("limit", 10),
|
|
628
|
-
offset=params.get("offset", 0),
|
|
629
|
-
**{k: v for k, v in params.items() if k not in ["limit", "offset"]}
|
|
630
|
-
)
|
|
631
|
-
return [epic.model_dump() for epic in epics]
|
|
632
|
-
|
|
633
|
-
async def _handle_epic_issues(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
634
|
-
"""Handle listing issues in an epic."""
|
|
635
|
-
epic_id = params["epic_id"]
|
|
636
|
-
issues = await self.adapter.list_issues_by_epic(epic_id)
|
|
637
|
-
return [issue.model_dump() for issue in issues]
|
|
638
|
-
|
|
639
|
-
async def _handle_issue_create(self, params: dict[str, Any]) -> dict[str, Any]:
    """Queue an issue-creation request and make sure a worker will run it.

    Queue health is probed first; a CRITICAL state triggers one auto-repair
    attempt before giving up with an error payload.  On success the request
    is enqueued and a worker process is started if none is running.
    """
    monitor = QueueHealthMonitor()
    report = monitor.check_health()

    if report["status"] == HealthStatus.CRITICAL:
        # One repair pass, then re-probe before declaring failure.
        repair = monitor.auto_repair()
        report = monitor.check_health()

        if report["status"] == HealthStatus.CRITICAL:
            blockers = [a for a in report["alerts"] if a["level"] == "critical"]
            return {
                "status": "error",
                "error": "Queue system is in critical state",
                "details": {
                    "health_status": report["status"],
                    "critical_issues": blockers,
                    "repair_attempted": repair["actions_taken"]
                }
            }

    # Build the payload that the background worker will hand to the adapter.
    payload = {
        "title": params["title"],
        "description": params.get("description"),
        "epic_id": params.get("epic_id"),
        "priority": params.get("priority", "medium"),
        "assignee": params.get("assignee"),
        "tags": params.get("tags", []),
        "estimated_hours": params.get("estimated_hours"),
    }

    queue = Queue()
    entry_id = queue.add(
        ticket_data=payload,
        adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
        operation="create_issue",
    )

    # Make sure something is draining the queue.
    started = WorkerManager().start_if_needed()
    if not started and queue.get_pending_count() > 0:
        return {
            "status": "error",
            "error": "Failed to start worker process",
            "queue_id": entry_id,
            "details": {
                "pending_count": queue.get_pending_count(),
                "action": "Worker process could not be started to process queued operations"
            }
        }

    return {
        "queue_id": entry_id,
        "status": "queued",
        "message": f"Issue creation queued with ID: {entry_id}",
        "issue_data": payload
    }
|
|
700
|
-
|
|
701
|
-
async def _handle_issue_tasks(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
702
|
-
"""Handle listing tasks in an issue."""
|
|
703
|
-
issue_id = params["issue_id"]
|
|
704
|
-
tasks = await self.adapter.list_tasks_by_issue(issue_id)
|
|
705
|
-
return [task.model_dump() for task in tasks]
|
|
706
|
-
|
|
707
|
-
async def _handle_task_create(self, params: dict[str, Any]) -> dict[str, Any]:
    """Queue a task-creation request under a parent issue.

    Probes queue health first (one auto-repair attempt on CRITICAL),
    rejects requests without a ``parent_id``, then enqueues the task and
    makes sure a worker is running to drain the queue.
    """
    monitor = QueueHealthMonitor()
    report = monitor.check_health()

    if report["status"] == HealthStatus.CRITICAL:
        # One repair pass, then re-probe before declaring failure.
        repair = monitor.auto_repair()
        report = monitor.check_health()

        if report["status"] == HealthStatus.CRITICAL:
            blockers = [a for a in report["alerts"] if a["level"] == "critical"]
            return {
                "status": "error",
                "error": "Queue system is in critical state",
                "details": {
                    "health_status": report["status"],
                    "critical_issues": blockers,
                    "repair_attempted": repair["actions_taken"]
                }
            }

    # Tasks are always children of an issue; reject orphans early.
    if not params.get("parent_id"):
        return {
            "status": "error",
            "error": "Tasks must have a parent_id (issue identifier)",
            "details": {"required_field": "parent_id"}
        }

    payload = {
        "title": params["title"],
        "parent_id": params["parent_id"],
        "description": params.get("description"),
        "priority": params.get("priority", "medium"),
        "assignee": params.get("assignee"),
        "tags": params.get("tags", []),
        "estimated_hours": params.get("estimated_hours"),
    }

    queue = Queue()
    entry_id = queue.add(
        ticket_data=payload,
        adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
        operation="create_task",
    )

    # Make sure something is draining the queue.
    started = WorkerManager().start_if_needed()
    if not started and queue.get_pending_count() > 0:
        return {
            "status": "error",
            "error": "Failed to start worker process",
            "queue_id": entry_id,
            "details": {
                "pending_count": queue.get_pending_count(),
                "action": "Worker process could not be started to process queued operations"
            }
        }

    return {
        "queue_id": entry_id,
        "status": "queued",
        "message": f"Task creation queued with ID: {entry_id}",
        "task_data": payload
    }
|
|
776
|
-
|
|
777
|
-
async def _handle_hierarchy_tree(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
778
|
-
"""Handle hierarchy tree visualization."""
|
|
779
|
-
epic_id = params.get("epic_id")
|
|
780
|
-
max_depth = params.get("max_depth", 3)
|
|
781
|
-
|
|
782
|
-
if epic_id:
|
|
783
|
-
# Get specific epic tree
|
|
784
|
-
epic = await self.adapter.get_epic(epic_id)
|
|
785
|
-
if not epic:
|
|
786
|
-
return {"error": f"Epic {epic_id} not found"}
|
|
787
|
-
|
|
788
|
-
# Build tree structure
|
|
789
|
-
tree = {
|
|
790
|
-
"epic": epic.model_dump(),
|
|
791
|
-
"issues": []
|
|
792
|
-
}
|
|
793
|
-
|
|
794
|
-
# Get issues in epic
|
|
795
|
-
issues = await self.adapter.list_issues_by_epic(epic_id)
|
|
796
|
-
for issue in issues:
|
|
797
|
-
issue_node = {
|
|
798
|
-
"issue": issue.model_dump(),
|
|
799
|
-
"tasks": []
|
|
800
|
-
}
|
|
801
|
-
|
|
802
|
-
# Get tasks in issue if depth allows
|
|
803
|
-
if max_depth > 2:
|
|
804
|
-
tasks = await self.adapter.list_tasks_by_issue(issue.id)
|
|
805
|
-
issue_node["tasks"] = [task.model_dump() for task in tasks]
|
|
806
|
-
|
|
807
|
-
tree["issues"].append(issue_node)
|
|
808
|
-
|
|
809
|
-
return tree
|
|
810
|
-
else:
|
|
811
|
-
# Get all epics with their hierarchies
|
|
812
|
-
epics = await self.adapter.list_epics(limit=params.get("limit", 10))
|
|
813
|
-
trees = []
|
|
814
|
-
|
|
815
|
-
for epic in epics:
|
|
816
|
-
tree = await self._handle_hierarchy_tree({"epic_id": epic.id, "max_depth": max_depth})
|
|
817
|
-
trees.append(tree)
|
|
818
|
-
|
|
819
|
-
return {"trees": trees}
|
|
820
|
-
|
|
821
|
-
async def _handle_bulk_create(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
822
|
-
"""Handle bulk ticket creation."""
|
|
823
|
-
tickets = params.get("tickets", [])
|
|
824
|
-
if not tickets:
|
|
825
|
-
return {"error": "No tickets provided for bulk creation"}
|
|
826
|
-
|
|
827
|
-
# Check queue health
|
|
828
|
-
health_monitor = QueueHealthMonitor()
|
|
829
|
-
health = health_monitor.check_health()
|
|
830
|
-
|
|
831
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
832
|
-
repair_result = health_monitor.auto_repair()
|
|
833
|
-
health = health_monitor.check_health()
|
|
834
|
-
|
|
835
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
836
|
-
return {
|
|
837
|
-
"status": "error",
|
|
838
|
-
"error": "Queue system is in critical state - cannot process bulk operations",
|
|
839
|
-
"details": {"health_status": health["status"]}
|
|
840
|
-
}
|
|
841
|
-
|
|
842
|
-
# Queue all tickets
|
|
843
|
-
queue = Queue()
|
|
844
|
-
queue_ids = []
|
|
845
|
-
|
|
846
|
-
for i, ticket_data in enumerate(tickets):
|
|
847
|
-
if not ticket_data.get("title"):
|
|
848
|
-
return {
|
|
849
|
-
"status": "error",
|
|
850
|
-
"error": f"Ticket {i} missing required 'title' field"
|
|
851
|
-
}
|
|
852
|
-
|
|
853
|
-
queue_id = queue.add(
|
|
854
|
-
ticket_data=ticket_data,
|
|
855
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
856
|
-
operation=ticket_data.get("operation", "create"),
|
|
857
|
-
)
|
|
858
|
-
queue_ids.append(queue_id)
|
|
859
|
-
|
|
860
|
-
# Start worker if needed
|
|
861
|
-
manager = WorkerManager()
|
|
862
|
-
manager.start_if_needed()
|
|
863
|
-
|
|
864
|
-
return {
|
|
865
|
-
"queue_ids": queue_ids,
|
|
866
|
-
"status": "queued",
|
|
867
|
-
"message": f"Bulk creation of {len(tickets)} tickets queued",
|
|
868
|
-
"count": len(tickets)
|
|
869
|
-
}
|
|
870
|
-
|
|
871
|
-
async def _handle_bulk_update(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
872
|
-
"""Handle bulk ticket updates."""
|
|
873
|
-
updates = params.get("updates", [])
|
|
874
|
-
if not updates:
|
|
875
|
-
return {"error": "No updates provided for bulk operation"}
|
|
876
|
-
|
|
877
|
-
# Check queue health
|
|
878
|
-
health_monitor = QueueHealthMonitor()
|
|
879
|
-
health = health_monitor.check_health()
|
|
880
|
-
|
|
881
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
882
|
-
repair_result = health_monitor.auto_repair()
|
|
883
|
-
health = health_monitor.check_health()
|
|
884
|
-
|
|
885
|
-
if health["status"] == HealthStatus.CRITICAL:
|
|
886
|
-
return {
|
|
887
|
-
"status": "error",
|
|
888
|
-
"error": "Queue system is in critical state - cannot process bulk operations",
|
|
889
|
-
"details": {"health_status": health["status"]}
|
|
890
|
-
}
|
|
891
|
-
|
|
892
|
-
# Queue all updates
|
|
893
|
-
queue = Queue()
|
|
894
|
-
queue_ids = []
|
|
895
|
-
|
|
896
|
-
for i, update_data in enumerate(updates):
|
|
897
|
-
if not update_data.get("ticket_id"):
|
|
898
|
-
return {
|
|
899
|
-
"status": "error",
|
|
900
|
-
"error": f"Update {i} missing required 'ticket_id' field"
|
|
901
|
-
}
|
|
902
|
-
|
|
903
|
-
queue_id = queue.add(
|
|
904
|
-
ticket_data=update_data,
|
|
905
|
-
adapter=self.adapter.__class__.__name__.lower().replace("adapter", ""),
|
|
906
|
-
operation="update",
|
|
907
|
-
)
|
|
908
|
-
queue_ids.append(queue_id)
|
|
909
|
-
|
|
910
|
-
# Start worker if needed
|
|
911
|
-
manager = WorkerManager()
|
|
912
|
-
manager.start_if_needed()
|
|
913
|
-
|
|
914
|
-
return {
|
|
915
|
-
"queue_ids": queue_ids,
|
|
916
|
-
"status": "queued",
|
|
917
|
-
"message": f"Bulk update of {len(updates)} tickets queued",
|
|
918
|
-
"count": len(updates)
|
|
919
|
-
}
|
|
920
|
-
|
|
921
|
-
async def _handle_search_hierarchy(self, params: dict[str, Any]) -> dict[str, Any]:
    """Search tickets and decorate each hit with its parents and children.

    ``include_parents`` pulls the owning epic / parent issue into each
    result; ``include_children`` pulls child issues (for epics) or tasks
    (for issues).  Both default to on.
    """
    query = params.get("query", "")
    want_children = params.get("include_children", True)
    want_parents = params.get("include_parents", True)

    criteria = SearchQuery(
        query=query,
        state=params.get("state"),
        priority=params.get("priority"),
        limit=params.get("limit", 50)
    )
    matches = await self.adapter.search(criteria)

    async def _decorate(ticket):
        # Build one enriched entry for a single search hit.
        entry = {"ticket": ticket.model_dump(), "hierarchy": {}}

        if want_parents:
            if getattr(ticket, 'parent_epic', None):
                owner = await self.adapter.get_epic(ticket.parent_epic)
                if owner:
                    entry["hierarchy"]["epic"] = owner.model_dump()
            if getattr(ticket, 'parent_issue', None):
                parent = await self.adapter.read(ticket.parent_issue)
                if parent:
                    entry["hierarchy"]["parent_issue"] = parent.model_dump()

        if want_children:
            if ticket.ticket_type == "epic":
                kids = await self.adapter.list_issues_by_epic(ticket.id)
                entry["hierarchy"]["issues"] = [k.model_dump() for k in kids]
            elif ticket.ticket_type == "issue":
                kids = await self.adapter.list_tasks_by_issue(ticket.id)
                entry["hierarchy"]["tasks"] = [k.model_dump() for k in kids]

        return entry

    results = [await _decorate(ticket) for ticket in matches]
    return {
        "results": results,
        "count": len(results),
        "query": query
    }
|
|
973
|
-
|
|
974
|
-
async def _handle_attach(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
975
|
-
"""Handle file attachment to ticket."""
|
|
976
|
-
# Note: This is a placeholder for attachment functionality
|
|
977
|
-
# Most adapters don't support file attachments directly
|
|
978
|
-
return {
|
|
979
|
-
"status": "not_implemented",
|
|
980
|
-
"error": "Attachment functionality not yet implemented",
|
|
981
|
-
"ticket_id": params.get("ticket_id"),
|
|
982
|
-
"details": {
|
|
983
|
-
"reason": "File attachments require adapter-specific implementation",
|
|
984
|
-
"alternatives": ["Add file URLs in comments", "Use external file storage"]
|
|
985
|
-
}
|
|
986
|
-
}
|
|
987
|
-
|
|
988
|
-
async def _handle_list_attachments(self, params: dict[str, Any]) -> list[dict[str, Any]]:
|
|
989
|
-
"""Handle listing ticket attachments."""
|
|
990
|
-
# Note: This is a placeholder for attachment functionality
|
|
991
|
-
return []
|
|
992
|
-
|
|
993
|
-
async def _handle_create_pr(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
994
|
-
"""Handle PR creation for a ticket."""
|
|
995
|
-
ticket_id = params.get("ticket_id")
|
|
996
|
-
if not ticket_id:
|
|
997
|
-
raise ValueError("ticket_id is required")
|
|
998
|
-
|
|
999
|
-
# Check if adapter supports PR creation
|
|
1000
|
-
adapter_name = self.adapter.__class__.__name__.lower()
|
|
1001
|
-
|
|
1002
|
-
if "github" in adapter_name:
|
|
1003
|
-
# GitHub adapter supports direct PR creation
|
|
1004
|
-
from ..adapters.github import GitHubAdapter
|
|
1005
|
-
|
|
1006
|
-
if isinstance(self.adapter, GitHubAdapter):
|
|
1007
|
-
try:
|
|
1008
|
-
result = await self.adapter.create_pull_request(
|
|
1009
|
-
ticket_id=ticket_id,
|
|
1010
|
-
base_branch=params.get("base_branch", "main"),
|
|
1011
|
-
head_branch=params.get("head_branch"),
|
|
1012
|
-
title=params.get("title"),
|
|
1013
|
-
body=params.get("body"),
|
|
1014
|
-
draft=params.get("draft", False),
|
|
1015
|
-
)
|
|
1016
|
-
return {
|
|
1017
|
-
"success": True,
|
|
1018
|
-
"pr_number": result.get("number"),
|
|
1019
|
-
"pr_url": result.get("url"),
|
|
1020
|
-
"branch": result.get("branch"),
|
|
1021
|
-
"linked_issue": result.get("linked_issue"),
|
|
1022
|
-
"message": f"Pull request created successfully: {result.get('url')}",
|
|
1023
|
-
}
|
|
1024
|
-
except Exception as e:
|
|
1025
|
-
return {
|
|
1026
|
-
"success": False,
|
|
1027
|
-
"error": str(e),
|
|
1028
|
-
"ticket_id": ticket_id,
|
|
1029
|
-
}
|
|
1030
|
-
elif "linear" in adapter_name:
|
|
1031
|
-
# Linear adapter needs GitHub config for PR creation
|
|
1032
|
-
from ..adapters.linear import LinearAdapter
|
|
1033
|
-
|
|
1034
|
-
if isinstance(self.adapter, LinearAdapter):
|
|
1035
|
-
# For Linear, we prepare the branch and metadata but can't create the actual PR
|
|
1036
|
-
# without GitHub integration configured
|
|
1037
|
-
try:
|
|
1038
|
-
github_config = {
|
|
1039
|
-
"owner": params.get("github_owner"),
|
|
1040
|
-
"repo": params.get("github_repo"),
|
|
1041
|
-
"base_branch": params.get("base_branch", "main"),
|
|
1042
|
-
"head_branch": params.get("head_branch"),
|
|
1043
|
-
}
|
|
1044
|
-
|
|
1045
|
-
# Validate GitHub config for Linear
|
|
1046
|
-
if not github_config.get("owner") or not github_config.get("repo"):
|
|
1047
|
-
return {
|
|
1048
|
-
"success": False,
|
|
1049
|
-
"error": "GitHub owner and repo are required for Linear PR creation",
|
|
1050
|
-
"ticket_id": ticket_id,
|
|
1051
|
-
}
|
|
1052
|
-
|
|
1053
|
-
result = await self.adapter.create_pull_request_for_issue(
|
|
1054
|
-
ticket_id=ticket_id,
|
|
1055
|
-
github_config=github_config,
|
|
1056
|
-
)
|
|
1057
|
-
return {
|
|
1058
|
-
"success": True,
|
|
1059
|
-
"branch_name": result.get("branch_name"),
|
|
1060
|
-
"ticket_id": ticket_id,
|
|
1061
|
-
"message": result.get("message"),
|
|
1062
|
-
"github_config": {
|
|
1063
|
-
"owner": result.get("github_owner"),
|
|
1064
|
-
"repo": result.get("github_repo"),
|
|
1065
|
-
"base_branch": result.get("base_branch"),
|
|
1066
|
-
},
|
|
1067
|
-
}
|
|
1068
|
-
except Exception as e:
|
|
1069
|
-
return {
|
|
1070
|
-
"success": False,
|
|
1071
|
-
"error": str(e),
|
|
1072
|
-
"ticket_id": ticket_id,
|
|
1073
|
-
}
|
|
1074
|
-
else:
|
|
1075
|
-
return {
|
|
1076
|
-
"success": False,
|
|
1077
|
-
"error": f"PR creation not supported for adapter: {adapter_name}",
|
|
1078
|
-
"ticket_id": ticket_id,
|
|
1079
|
-
}
|
|
1080
|
-
|
|
1081
|
-
async def _handle_link_pr(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
1082
|
-
"""Handle linking an existing PR to a ticket."""
|
|
1083
|
-
ticket_id = params.get("ticket_id")
|
|
1084
|
-
pr_url = params.get("pr_url")
|
|
1085
|
-
|
|
1086
|
-
if not ticket_id:
|
|
1087
|
-
raise ValueError("ticket_id is required")
|
|
1088
|
-
if not pr_url:
|
|
1089
|
-
raise ValueError("pr_url is required")
|
|
1090
|
-
|
|
1091
|
-
adapter_name = self.adapter.__class__.__name__.lower()
|
|
1092
|
-
|
|
1093
|
-
if "github" in adapter_name:
|
|
1094
|
-
from ..adapters.github import GitHubAdapter
|
|
1095
|
-
|
|
1096
|
-
if isinstance(self.adapter, GitHubAdapter):
|
|
1097
|
-
try:
|
|
1098
|
-
result = await self.adapter.link_existing_pull_request(
|
|
1099
|
-
ticket_id=ticket_id,
|
|
1100
|
-
pr_url=pr_url,
|
|
1101
|
-
)
|
|
1102
|
-
return result
|
|
1103
|
-
except Exception as e:
|
|
1104
|
-
return {
|
|
1105
|
-
"success": False,
|
|
1106
|
-
"error": str(e),
|
|
1107
|
-
"ticket_id": ticket_id,
|
|
1108
|
-
"pr_url": pr_url,
|
|
1109
|
-
}
|
|
1110
|
-
elif "linear" in adapter_name:
|
|
1111
|
-
from ..adapters.linear import LinearAdapter
|
|
1112
|
-
|
|
1113
|
-
if isinstance(self.adapter, LinearAdapter):
|
|
1114
|
-
try:
|
|
1115
|
-
result = await self.adapter.link_to_pull_request(
|
|
1116
|
-
ticket_id=ticket_id,
|
|
1117
|
-
pr_url=pr_url,
|
|
1118
|
-
)
|
|
1119
|
-
return result
|
|
1120
|
-
except Exception as e:
|
|
1121
|
-
return {
|
|
1122
|
-
"success": False,
|
|
1123
|
-
"error": str(e),
|
|
1124
|
-
"ticket_id": ticket_id,
|
|
1125
|
-
"pr_url": pr_url,
|
|
1126
|
-
}
|
|
1127
|
-
else:
|
|
1128
|
-
return {
|
|
1129
|
-
"success": False,
|
|
1130
|
-
"error": f"PR linking not supported for adapter: {adapter_name}",
|
|
1131
|
-
"ticket_id": ticket_id,
|
|
1132
|
-
"pr_url": pr_url,
|
|
1133
|
-
}
|
|
1134
|
-
|
|
1135
|
-
async def _handle_initialize(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
1136
|
-
"""Handle initialize request from MCP client.
|
|
1137
|
-
|
|
1138
|
-
Args:
|
|
1139
|
-
params: Initialize parameters
|
|
1140
|
-
|
|
1141
|
-
Returns:
|
|
1142
|
-
Server capabilities
|
|
1143
|
-
|
|
1144
|
-
"""
|
|
1145
|
-
return {
|
|
1146
|
-
"protocolVersion": "2024-11-05",
|
|
1147
|
-
"serverInfo": {"name": "mcp-ticketer", "version": "0.1.8"},
|
|
1148
|
-
"capabilities": {"tools": {"listChanged": False}},
|
|
1149
|
-
}
|
|
1150
|
-
|
|
1151
|
-
async def _handle_tools_list(self) -> dict[str, Any]:
    """List available MCP tools.

    Returns:
        A ``{"tools": [...]}`` payload where each entry carries the tool
        ``name``, a human-readable ``description``, and a JSON-Schema
        ``inputSchema`` describing its arguments.  The list is static.
    """
    return {
        "tools": [
            # Hierarchy Management Tools
            {
                "name": "epic_create",
                "description": "Create a new epic (top-level project/milestone)",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "title": {"type": "string", "description": "Epic title"},
                        "description": {"type": "string", "description": "Epic description"},
                        "target_date": {"type": "string", "description": "Target completion date (ISO format)"},
                        "lead_id": {"type": "string", "description": "Epic lead/owner ID"},
                        "child_issues": {"type": "array", "items": {"type": "string"}, "description": "Initial child issue IDs"}
                    },
                    "required": ["title"]
                }
            },
            {
                "name": "epic_list",
                "description": "List all epics",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "limit": {"type": "integer", "default": 10, "description": "Maximum number of epics to return"},
                        "offset": {"type": "integer", "default": 0, "description": "Number of epics to skip"}
                    }
                }
            },
            {
                "name": "epic_issues",
                "description": "List all issues in an epic",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "epic_id": {"type": "string", "description": "Epic ID to get issues for"}
                    },
                    "required": ["epic_id"]
                }
            },
            {
                "name": "issue_create",
                "description": "Create a new issue (work item)",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "title": {"type": "string", "description": "Issue title"},
                        "description": {"type": "string", "description": "Issue description"},
                        "epic_id": {"type": "string", "description": "Parent epic ID"},
                        "priority": {"type": "string", "enum": ["low", "medium", "high", "critical"], "default": "medium"},
                        "assignee": {"type": "string", "description": "Assignee username"},
                        "tags": {"type": "array", "items": {"type": "string"}, "description": "Issue tags"},
                        "estimated_hours": {"type": "number", "description": "Estimated hours to complete"}
                    },
                    "required": ["title"]
                }
            },
            {
                "name": "issue_tasks",
                "description": "List all tasks in an issue",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "issue_id": {"type": "string", "description": "Issue ID to get tasks for"}
                    },
                    "required": ["issue_id"]
                }
            },
            {
                "name": "task_create",
                "description": "Create a new task (sub-item under an issue)",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "title": {"type": "string", "description": "Task title"},
                        "parent_id": {"type": "string", "description": "Parent issue ID (required)"},
                        "description": {"type": "string", "description": "Task description"},
                        "priority": {"type": "string", "enum": ["low", "medium", "high", "critical"], "default": "medium"},
                        "assignee": {"type": "string", "description": "Assignee username"},
                        "tags": {"type": "array", "items": {"type": "string"}, "description": "Task tags"},
                        "estimated_hours": {"type": "number", "description": "Estimated hours to complete"}
                    },
                    "required": ["title", "parent_id"]
                }
            },
            {
                "name": "hierarchy_tree",
                "description": "Get hierarchy tree view of epic/issues/tasks",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "epic_id": {"type": "string", "description": "Specific epic ID (optional - if not provided, returns all epics)"},
                        "max_depth": {"type": "integer", "default": 3, "description": "Maximum depth to traverse (1=epics only, 2=epics+issues, 3=full tree)"},
                        "limit": {"type": "integer", "default": 10, "description": "Maximum number of epics to return (when epic_id not specified)"}
                    }
                }
            },
            # Bulk Operations
            {
                "name": "ticket_bulk_create",
                "description": "Create multiple tickets in one operation",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "tickets": {
                            "type": "array",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "title": {"type": "string"},
                                    "description": {"type": "string"},
                                    "priority": {"type": "string", "enum": ["low", "medium", "high", "critical"]},
                                    "operation": {"type": "string", "enum": ["create", "create_epic", "create_issue", "create_task"], "default": "create"},
                                    "epic_id": {"type": "string", "description": "For issues"},
                                    "parent_id": {"type": "string", "description": "For tasks"}
                                },
                                "required": ["title"]
                            },
                            "description": "Array of tickets to create"
                        }
                    },
                    "required": ["tickets"]
                }
            },
            {
                "name": "ticket_bulk_update",
                "description": "Update multiple tickets in one operation",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "updates": {
                            "type": "array",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "ticket_id": {"type": "string"},
                                    "title": {"type": "string"},
                                    "description": {"type": "string"},
                                    "priority": {"type": "string", "enum": ["low", "medium", "high", "critical"]},
                                    "state": {"type": "string"},
                                    "assignee": {"type": "string"}
                                },
                                "required": ["ticket_id"]
                            },
                            "description": "Array of ticket updates"
                        }
                    },
                    "required": ["updates"]
                }
            },
            # Advanced Search
            {
                "name": "ticket_search_hierarchy",
                "description": "Search tickets with hierarchy context",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "query": {"type": "string", "description": "Search query"},
                        "state": {"type": "string", "description": "Filter by state"},
                        "priority": {"type": "string", "description": "Filter by priority"},
                        "limit": {"type": "integer", "default": 50, "description": "Maximum results"},
                        "include_children": {"type": "boolean", "default": True, "description": "Include child items in results"},
                        "include_parents": {"type": "boolean", "default": True, "description": "Include parent context in results"}
                    },
                    "required": ["query"]
                }
            },
            # PR Integration
            {
                "name": "ticket_create_pr",
                "description": "Create a GitHub PR linked to a ticket",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "ticket_id": {
                            "type": "string",
                            "description": "Ticket ID to link the PR to",
                        },
                        "base_branch": {
                            "type": "string",
                            "description": "Target branch for the PR",
                            "default": "main",
                        },
                        "head_branch": {
                            "type": "string",
                            "description": "Source branch name (auto-generated if not provided)",
                        },
                        "title": {
                            "type": "string",
                            "description": "PR title (uses ticket title if not provided)",
                        },
                        "body": {
                            "type": "string",
                            "description": "PR description (auto-generated with issue link if not provided)",
                        },
                        "draft": {
                            "type": "boolean",
                            "description": "Create as draft PR",
                            "default": False,
                        },
                    },
                    "required": ["ticket_id"],
                },
            },
            # Standard Ticket Operations
            {
                "name": "ticket_link_pr",
                "description": "Link an existing PR to a ticket",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "ticket_id": {
                            "type": "string",
                            "description": "Ticket ID to link the PR to",
                        },
                        "pr_url": {
                            "type": "string",
                            "description": "GitHub PR URL to link",
                        },
                    },
                    "required": ["ticket_id", "pr_url"],
                },
            },
            {
                "name": "ticket_create",
                "description": "Create a new ticket",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "title": {"type": "string", "description": "Ticket title"},
                        "description": {
                            "type": "string",
                            "description": "Description",
                        },
                        "priority": {
                            "type": "string",
                            "enum": ["low", "medium", "high", "critical"],
                        },
                        "tags": {"type": "array", "items": {"type": "string"}},
                        "assignee": {"type": "string"},
                    },
                    "required": ["title"],
                },
            },
            {
                "name": "ticket_list",
                "description": "List tickets",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "limit": {"type": "integer", "default": 10},
                        "state": {"type": "string"},
                        "priority": {"type": "string"},
                    },
                },
            },
            {
                "name": "ticket_update",
                "description": "Update a ticket",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "ticket_id": {"type": "string", "description": "Ticket ID"},
                        "updates": {
                            "type": "object",
                            "description": "Fields to update",
                        },
                    },
                    "required": ["ticket_id", "updates"],
                },
            },
            {
                "name": "ticket_transition",
                "description": "Change ticket state",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "ticket_id": {"type": "string"},
                        "target_state": {"type": "string"},
                    },
                    "required": ["ticket_id", "target_state"],
                },
            },
            {
                "name": "ticket_search",
                "description": "Search tickets",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "query": {"type": "string"},
                        "state": {"type": "string"},
                        "priority": {"type": "string"},
                        "limit": {"type": "integer", "default": 10},
                    },
                },
            },
            {
                "name": "ticket_status",
                "description": "Check status of queued ticket operation",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "queue_id": {
                            "type": "string",
                            "description": "Queue ID returned from create/update/delete operations",
                        },
                    },
                    "required": ["queue_id"],
                },
            },
            # System diagnostics tools
            {
                "name": "system_health",
                "description": "Quick system health check - shows configuration, queue worker, and failure rates",
                "inputSchema": {
                    "type": "object",
                    "properties": {},
                },
            },
            {
                "name": "system_diagnose",
                "description": "Comprehensive system diagnostics - detailed analysis of all components",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "include_logs": {
                            "type": "boolean",
                            "default": False,
                            "description": "Include recent log analysis in diagnosis",
                        },
                    },
                },
            },
        ]
    }
|
|
1488
|
-
|
|
1489
|
-
async def _handle_tools_call(self, params: dict[str, Any]) -> dict[str, Any]:
|
|
1490
|
-
"""Handle tool invocation from MCP client.
|
|
1491
|
-
|
|
1492
|
-
Args:
|
|
1493
|
-
params: Contains 'name' and 'arguments' fields
|
|
1494
|
-
|
|
1495
|
-
Returns:
|
|
1496
|
-
MCP formatted response with content array
|
|
1497
|
-
|
|
1498
|
-
"""
|
|
1499
|
-
tool_name = params.get("name")
|
|
1500
|
-
arguments = params.get("arguments", {})
|
|
1501
|
-
|
|
1502
|
-
try:
|
|
1503
|
-
# Route to appropriate handler based on tool name
|
|
1504
|
-
# Hierarchy management tools
|
|
1505
|
-
if tool_name == "epic_create":
|
|
1506
|
-
result = await self._handle_epic_create(arguments)
|
|
1507
|
-
elif tool_name == "epic_list":
|
|
1508
|
-
result = await self._handle_epic_list(arguments)
|
|
1509
|
-
elif tool_name == "epic_issues":
|
|
1510
|
-
result = await self._handle_epic_issues(arguments)
|
|
1511
|
-
elif tool_name == "issue_create":
|
|
1512
|
-
result = await self._handle_issue_create(arguments)
|
|
1513
|
-
elif tool_name == "issue_tasks":
|
|
1514
|
-
result = await self._handle_issue_tasks(arguments)
|
|
1515
|
-
elif tool_name == "task_create":
|
|
1516
|
-
result = await self._handle_task_create(arguments)
|
|
1517
|
-
elif tool_name == "hierarchy_tree":
|
|
1518
|
-
result = await self._handle_hierarchy_tree(arguments)
|
|
1519
|
-
# Bulk operations
|
|
1520
|
-
elif tool_name == "ticket_bulk_create":
|
|
1521
|
-
result = await self._handle_bulk_create(arguments)
|
|
1522
|
-
elif tool_name == "ticket_bulk_update":
|
|
1523
|
-
result = await self._handle_bulk_update(arguments)
|
|
1524
|
-
# Advanced search
|
|
1525
|
-
elif tool_name == "ticket_search_hierarchy":
|
|
1526
|
-
result = await self._handle_search_hierarchy(arguments)
|
|
1527
|
-
# Standard ticket operations
|
|
1528
|
-
elif tool_name == "ticket_create":
|
|
1529
|
-
result = await self._handle_create(arguments)
|
|
1530
|
-
elif tool_name == "ticket_list":
|
|
1531
|
-
result = await self._handle_list(arguments)
|
|
1532
|
-
elif tool_name == "ticket_update":
|
|
1533
|
-
result = await self._handle_update(arguments)
|
|
1534
|
-
elif tool_name == "ticket_transition":
|
|
1535
|
-
result = await self._handle_transition(arguments)
|
|
1536
|
-
elif tool_name == "ticket_search":
|
|
1537
|
-
result = await self._handle_search(arguments)
|
|
1538
|
-
elif tool_name == "ticket_status":
|
|
1539
|
-
result = await self._handle_queue_status(arguments)
|
|
1540
|
-
# System diagnostics
|
|
1541
|
-
elif tool_name == "system_health":
|
|
1542
|
-
result = await self._handle_system_health(arguments)
|
|
1543
|
-
elif tool_name == "system_diagnose":
|
|
1544
|
-
result = await self._handle_system_diagnose(arguments)
|
|
1545
|
-
# PR integration
|
|
1546
|
-
elif tool_name == "ticket_create_pr":
|
|
1547
|
-
result = await self._handle_create_pr(arguments)
|
|
1548
|
-
elif tool_name == "ticket_link_pr":
|
|
1549
|
-
result = await self._handle_link_pr(arguments)
|
|
1550
|
-
else:
|
|
1551
|
-
return {
|
|
1552
|
-
"content": [{"type": "text", "text": f"Unknown tool: {tool_name}"}],
|
|
1553
|
-
"isError": True,
|
|
1554
|
-
}
|
|
1555
|
-
|
|
1556
|
-
# Format successful response in MCP content format
|
|
1557
|
-
# Handle different response types
|
|
1558
|
-
if isinstance(result, list):
|
|
1559
|
-
# For list operations, convert Pydantic models to dicts
|
|
1560
|
-
result_text = json.dumps(result, indent=2, default=str)
|
|
1561
|
-
elif isinstance(result, dict):
|
|
1562
|
-
# For dict responses (create, update, etc.)
|
|
1563
|
-
result_text = json.dumps(result, indent=2, default=str)
|
|
1564
|
-
else:
|
|
1565
|
-
result_text = str(result)
|
|
1566
|
-
|
|
1567
|
-
return {
|
|
1568
|
-
"content": [{"type": "text", "text": result_text}],
|
|
1569
|
-
"isError": False,
|
|
1570
|
-
}
|
|
1571
|
-
|
|
1572
|
-
except Exception as e:
|
|
1573
|
-
# Format error response
|
|
1574
|
-
return {
|
|
1575
|
-
"content": [
|
|
1576
|
-
{
|
|
1577
|
-
"type": "text",
|
|
1578
|
-
"text": f"Error calling tool {tool_name}: {str(e)}",
|
|
1579
|
-
}
|
|
1580
|
-
],
|
|
1581
|
-
"isError": True,
|
|
1582
|
-
}
|
|
1583
|
-
|
|
1584
|
-
async def run(self) -> None:
    """Run the MCP server, reading from stdin and writing to stdout.

    Processes newline-delimited JSON-RPC requests until EOF, interrupt,
    broken pipe, or ``self.running`` is cleared by ``stop()``.
    """
    self.running = True

    try:
        reader = asyncio.StreamReader()
        protocol = asyncio.StreamReaderProtocol(reader)
        # get_running_loop() is the supported API inside a coroutine;
        # get_event_loop() is deprecated in this context since Python 3.10.
        await asyncio.get_running_loop().connect_read_pipe(
            lambda: protocol, sys.stdin
        )
    except Exception as e:
        sys.stderr.write(f"Failed to connect to stdin: {str(e)}\n")
        return

    # Main message loop
    while self.running:
        try:
            line = await reader.readline()
            if not line:
                # EOF reached, exit gracefully
                sys.stderr.write("EOF reached, shutting down server\n")
                break

            # Parse JSON-RPC request
            request = json.loads(line.decode())

            # Handle request
            response = await self.handle_request(request)

            # Send response (one JSON object per line, then flush so the
            # client sees it immediately)
            sys.stdout.write(json.dumps(response) + "\n")
            sys.stdout.flush()

        except json.JSONDecodeError as e:
            # -32700 is the JSON-RPC "Parse error" code
            error_response = self._error_response(
                None, -32700, f"Parse error: {str(e)}"
            )
            sys.stdout.write(json.dumps(error_response) + "\n")
            sys.stdout.flush()

        except KeyboardInterrupt:
            sys.stderr.write("Received interrupt signal\n")
            break

        except BrokenPipeError:
            sys.stderr.write("Connection closed by client\n")
            break

        except Exception as e:
            # Log error but continue running
            sys.stderr.write(f"Error: {str(e)}\n")
|
1635
|
-
|
|
1636
|
-
async def stop(self) -> None:
    """Stop the server: end the read loop and release the adapter."""
    # run()'s while-loop checks this flag; clearing it ends message handling.
    self.running = False
    await self.adapter.close()
|
|
1640
|
-
|
|
1641
|
-
|
|
1642
|
-
async def main():
    """Main entry point for MCP server - kept for backward compatibility.

    This function is maintained in case it's being called directly,
    but the preferred way is now through the CLI: `mcp-ticketer mcp`

    SECURITY: This method ONLY reads from the current project directory
    to prevent configuration leakage across projects. It will NEVER read
    from user home directory or system-wide locations.

    Raises:
        ValueError: If the resolved config file escapes the project
            directory (e.g. via a symlink).
    """
    # Load configuration
    import json
    import logging
    from pathlib import Path

    logger = logging.getLogger(__name__)

    # ONLY read from project-local config, never from user home
    config_file = Path.cwd() / ".mcp-ticketer" / "config.json"
    if config_file.exists():
        # Validate config is within project.
        # BUG FIX: the original code raised the security ValueError inside
        # the same try block whose except clause swallowed ValueError, so
        # the check could never fire. Probe and violation are now separate.
        try:
            inside_project = config_file.resolve().is_relative_to(
                Path.cwd().resolve()
            )
        except (ValueError, RuntimeError):
            # resolve()/is_relative_to may fail on exotic paths; the check
            # is then inconclusive — keep the original lenient behavior.
            inside_project = True

        if not inside_project:
            logger.error(
                f"Security violation: Config file {config_file} "
                "is not within project directory"
            )
            raise ValueError(
                f"Security violation: Config file {config_file} "
                "is not within project directory"
            )

        try:
            with open(config_file) as f:
                config = json.load(f)
            adapter_type = config.get("default_adapter", "aitrackdown")
            # Get adapter-specific config
            adapters_config = config.get("adapters", {})
            adapter_config = adapters_config.get(adapter_type, {})
            # Fallback to legacy config format
            if not adapter_config and "config" in config:
                adapter_config = config["config"]
            logger.info(
                f"Loaded MCP configuration from project-local: {config_file}"
            )
        except (OSError, json.JSONDecodeError) as e:
            logger.warning(f"Could not load project config: {e}, using defaults")
            adapter_type = "aitrackdown"
            adapter_config = {"base_path": ".aitrackdown"}
    else:
        # Default to aitrackdown with local base path
        logger.info("No project-local config found, defaulting to aitrackdown adapter")
        adapter_type = "aitrackdown"
        adapter_config = {"base_path": ".aitrackdown"}

    # Create and run server
    server = MCPTicketServer(adapter_type, adapter_config)
    await server.run()
|
1703
|
-
|
|
1704
|
-
|
|
1705
|
-
# Add diagnostic handler methods to MCPTicketServer class
|
|
1706
|
-
async def _handle_system_health(self, arguments: dict[str, Any]) -> dict[str, Any]:
|
|
1707
|
-
"""Handle system health check."""
|
|
1708
|
-
from ..cli.diagnostics import SystemDiagnostics
|
|
1709
|
-
|
|
1710
|
-
try:
|
|
1711
|
-
diagnostics = SystemDiagnostics()
|
|
1712
|
-
|
|
1713
|
-
# Quick health checks
|
|
1714
|
-
health_status = {
|
|
1715
|
-
"overall_status": "healthy",
|
|
1716
|
-
"components": {},
|
|
1717
|
-
"issues": [],
|
|
1718
|
-
"warnings": [],
|
|
1719
|
-
}
|
|
1720
|
-
|
|
1721
|
-
# Check configuration
|
|
1722
|
-
try:
|
|
1723
|
-
from ..core.config import get_config
|
|
1724
|
-
config = get_config()
|
|
1725
|
-
adapters = config.get_enabled_adapters()
|
|
1726
|
-
if adapters:
|
|
1727
|
-
health_status["components"]["configuration"] = {
|
|
1728
|
-
"status": "healthy",
|
|
1729
|
-
"adapters_count": len(adapters),
|
|
1730
|
-
}
|
|
1731
|
-
else:
|
|
1732
|
-
health_status["components"]["configuration"] = {
|
|
1733
|
-
"status": "failed",
|
|
1734
|
-
"error": "No adapters configured",
|
|
1735
|
-
}
|
|
1736
|
-
health_status["issues"].append("No adapters configured")
|
|
1737
|
-
health_status["overall_status"] = "critical"
|
|
1738
|
-
except Exception as e:
|
|
1739
|
-
health_status["components"]["configuration"] = {
|
|
1740
|
-
"status": "failed",
|
|
1741
|
-
"error": str(e),
|
|
1742
|
-
}
|
|
1743
|
-
health_status["issues"].append(f"Configuration error: {str(e)}")
|
|
1744
|
-
health_status["overall_status"] = "critical"
|
|
1745
|
-
|
|
1746
|
-
# Check queue system
|
|
1747
|
-
try:
|
|
1748
|
-
from ..queue.manager import WorkerManager
|
|
1749
|
-
worker_manager = WorkerManager()
|
|
1750
|
-
worker_status = worker_manager.get_status()
|
|
1751
|
-
stats = worker_manager.queue.get_stats()
|
|
1752
|
-
|
|
1753
|
-
total = stats.get("total", 0)
|
|
1754
|
-
failed = stats.get("failed", 0)
|
|
1755
|
-
failure_rate = (failed / total * 100) if total > 0 else 0
|
|
1756
|
-
|
|
1757
|
-
queue_health = {
|
|
1758
|
-
"status": "healthy",
|
|
1759
|
-
"worker_running": worker_status.get("running", False),
|
|
1760
|
-
"worker_pid": worker_status.get("pid"),
|
|
1761
|
-
"failure_rate": failure_rate,
|
|
1762
|
-
"total_processed": total,
|
|
1763
|
-
"failed_items": failed,
|
|
1764
|
-
}
|
|
1765
|
-
|
|
1766
|
-
if not worker_status.get("running", False):
|
|
1767
|
-
queue_health["status"] = "failed"
|
|
1768
|
-
health_status["issues"].append("Queue worker not running")
|
|
1769
|
-
health_status["overall_status"] = "critical"
|
|
1770
|
-
elif failure_rate > 50:
|
|
1771
|
-
queue_health["status"] = "degraded"
|
|
1772
|
-
health_status["issues"].append(f"High queue failure rate: {failure_rate:.1f}%")
|
|
1773
|
-
health_status["overall_status"] = "critical"
|
|
1774
|
-
elif failure_rate > 20:
|
|
1775
|
-
queue_health["status"] = "warning"
|
|
1776
|
-
health_status["warnings"].append(f"Elevated queue failure rate: {failure_rate:.1f}%")
|
|
1777
|
-
if health_status["overall_status"] == "healthy":
|
|
1778
|
-
health_status["overall_status"] = "warning"
|
|
1779
|
-
|
|
1780
|
-
health_status["components"]["queue_system"] = queue_health
|
|
1781
|
-
|
|
1782
|
-
except Exception as e:
|
|
1783
|
-
health_status["components"]["queue_system"] = {
|
|
1784
|
-
"status": "failed",
|
|
1785
|
-
"error": str(e),
|
|
1786
|
-
}
|
|
1787
|
-
health_status["issues"].append(f"Queue system error: {str(e)}")
|
|
1788
|
-
health_status["overall_status"] = "critical"
|
|
1789
|
-
|
|
1790
|
-
return {
|
|
1791
|
-
"content": [
|
|
1792
|
-
{
|
|
1793
|
-
"type": "text",
|
|
1794
|
-
"text": f"System Health Status: {health_status['overall_status'].upper()}\n\n" +
|
|
1795
|
-
f"Configuration: {health_status['components'].get('configuration', {}).get('status', 'unknown')}\n" +
|
|
1796
|
-
f"Queue System: {health_status['components'].get('queue_system', {}).get('status', 'unknown')}\n\n" +
|
|
1797
|
-
f"Issues: {len(health_status['issues'])}\n" +
|
|
1798
|
-
f"Warnings: {len(health_status['warnings'])}\n\n" +
|
|
1799
|
-
(f"Critical Issues:\n" + "\n".join(f"• {issue}" for issue in health_status['issues']) + "\n\n" if health_status['issues'] else "") +
|
|
1800
|
-
(f"Warnings:\n" + "\n".join(f"• {warning}" for warning in health_status['warnings']) + "\n\n" if health_status['warnings'] else "") +
|
|
1801
|
-
"For detailed diagnosis, use system_diagnose tool.",
|
|
1802
|
-
}
|
|
1803
|
-
],
|
|
1804
|
-
"isError": health_status["overall_status"] == "critical",
|
|
1805
|
-
}
|
|
1806
|
-
|
|
1807
|
-
except Exception as e:
|
|
1808
|
-
return {
|
|
1809
|
-
"content": [
|
|
1810
|
-
{
|
|
1811
|
-
"type": "text",
|
|
1812
|
-
"text": f"Health check failed: {str(e)}",
|
|
1813
|
-
}
|
|
1814
|
-
],
|
|
1815
|
-
"isError": True,
|
|
1816
|
-
}
|
|
1817
|
-
|
|
1818
|
-
|
|
1819
|
-
async def _handle_system_diagnose(self, arguments: dict[str, Any]) -> dict[str, Any]:
    """Handle comprehensive system diagnosis.

    Runs the full SystemDiagnostics pass and renders the report as a
    plain-text summary; ``isError`` is True when critical issues exist.
    """
    from ..cli.diagnostics import SystemDiagnostics

    try:
        diag = SystemDiagnostics()
        report = await diag.run_full_diagnosis()

        # Severity ladder: any issue is CRITICAL, else warnings mean WARNING.
        overall = (
            "CRITICAL" if diag.issues
            else "WARNING" if diag.warnings
            else "HEALTHY"
        )

        # Format report for MCP response
        summary = f"""System Diagnosis Report
Generated: {report['timestamp']}
Version: {report['version']}

OVERALL STATUS: {overall}

COMPONENT STATUS:
• Configuration: {len(report['configuration']['issues'])} issues
• Adapters: {report['adapters']['failed_adapters']}/{report['adapters']['total_adapters']} failed
• Queue System: {report['queue_system']['health_score']}/100 health score

STATISTICS:
• Successes: {len(diag.successes)}
• Warnings: {len(diag.warnings)}
• Critical Issues: {len(diag.issues)}

"""

        if diag.issues:
            summary += (
                "CRITICAL ISSUES:\n"
                + "".join(f"• {item}\n" for item in diag.issues)
                + "\n"
            )

        if diag.warnings:
            summary += (
                "WARNINGS:\n"
                + "".join(f"• {item}\n" for item in diag.warnings)
                + "\n"
            )

        if report["recommendations"]:
            summary += "RECOMMENDATIONS:\n" + "".join(
                f"{item}\n" for item in report["recommendations"]
            )

        return {
            "content": [{"type": "text", "text": summary}],
            "isError": bool(diag.issues),
        }

    except Exception as e:
        return {
            "content": [
                {
                    "type": "text",
                    "text": f"System diagnosis failed: {str(e)}",
                }
            ],
            "isError": True,
        }
|
|
1887
|
-
|
|
1888
|
-
|
|
1889
|
-
# Monkey patch the module-level diagnostic coroutines onto MCPTicketServer
# so _handle_tools_call can dispatch to them as bound methods (self is
# supplied automatically at call time).
MCPTicketServer._handle_system_health = _handle_system_health
MCPTicketServer._handle_system_diagnose = _handle_system_diagnose
|
1892
|
-
|
|
1893
|
-
|
|
1894
|
-
# Direct-execution entry point; the preferred invocation is `mcp-ticketer mcp`.
if __name__ == "__main__":
    asyncio.run(main())
|