mcp-ticketer 0.1.30__py3-none-any.whl → 1.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of mcp-ticketer might be problematic; see the registry listing for details.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +796 -46
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1416 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github.py +879 -129
- mcp_ticketer/adapters/hybrid.py +11 -11
- mcp_ticketer/adapters/jira.py +973 -73
- mcp_ticketer/adapters/linear/__init__.py +24 -0
- mcp_ticketer/adapters/linear/adapter.py +2732 -0
- mcp_ticketer/adapters/linear/client.py +344 -0
- mcp_ticketer/adapters/linear/mappers.py +420 -0
- mcp_ticketer/adapters/linear/queries.py +479 -0
- mcp_ticketer/adapters/linear/types.py +360 -0
- mcp_ticketer/adapters/linear.py +10 -2315
- mcp_ticketer/analysis/__init__.py +23 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +421 -0
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +888 -151
- mcp_ticketer/cli/diagnostics.py +400 -157
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +616 -0
- mcp_ticketer/cli/main.py +203 -1165
- mcp_ticketer/cli/mcp_configure.py +474 -90
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +418 -0
- mcp_ticketer/cli/platform_installer.py +513 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +639 -0
- mcp_ticketer/cli/simple_health.py +90 -65
- mcp_ticketer/cli/ticket_commands.py +1013 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +114 -66
- mcp_ticketer/core/__init__.py +24 -1
- mcp_ticketer/core/adapter.py +250 -16
- mcp_ticketer/core/config.py +145 -37
- mcp_ticketer/core/env_discovery.py +101 -22
- mcp_ticketer/core/env_loader.py +349 -0
- mcp_ticketer/core/exceptions.py +160 -0
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/models.py +280 -28
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/project_config.py +183 -49
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +171 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +655 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +56 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +495 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +226 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +273 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1439 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +921 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +300 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +948 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +215 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +170 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1268 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +547 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +95 -25
- mcp_ticketer/queue/queue.py +40 -21
- mcp_ticketer/queue/run_worker.py +6 -1
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +109 -49
- mcp_ticketer-1.2.11.dist-info/METADATA +792 -0
- mcp_ticketer-1.2.11.dist-info/RECORD +110 -0
- mcp_ticketer/mcp/server.py +0 -1895
- mcp_ticketer-0.1.30.dist-info/METADATA +0 -413
- mcp_ticketer-0.1.30.dist-info/RECORD +0 -49
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/top_level.txt +0 -0
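The largest single addition is the new MCP server entry point, mcp_ticketer/mcp/server/main.py, reproduced in full below. It exposes ticket operations as JSON-RPC methods over stdio. As a rough illustration only (not part of the release), the handler can also be driven in-process; this sketch assumes the default aitrackdown adapter can be constructed in the current working directory and that the request DTO accepts just a title and priority:

# Hypothetical usage sketch, not part of the package diff.
import asyncio

from mcp_ticketer.mcp.server.main import MCPTicketServer


async def demo() -> None:
    server = MCPTicketServer()  # defaults to the "aitrackdown" adapter
    response = await server.handle_request(
        {
            "jsonrpc": "2.0",
            "id": 1,
            "method": "ticket/create",
            "params": {"title": "Example ticket", "priority": "medium"},
        }
    )
    print(response)  # expected shape: {"jsonrpc": "2.0", "result": {...}, "id": 1}
    await server.stop()


asyncio.run(demo())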
@@ -0,0 +1,1343 @@
"""MCP JSON-RPC server for ticket management - Simplified synchronous implementation."""

import asyncio
import json
import sys
from pathlib import Path
from typing import Any

from dotenv import load_dotenv

# Import adapters module to trigger registration
import mcp_ticketer.adapters  # noqa: F401

from ...core import AdapterRegistry
from ...core.models import Comment, Epic, Priority, SearchQuery, Task, TicketState
from .constants import (
    DEFAULT_BASE_PATH,
    DEFAULT_LIMIT,
    DEFAULT_MAX_DEPTH,
    DEFAULT_OFFSET,
    ERROR_INTERNAL,
    ERROR_METHOD_NOT_FOUND,
    ERROR_PARSE,
    JSONRPC_VERSION,
    MCP_PROTOCOL_VERSION,
    MSG_EPIC_NOT_FOUND,
    MSG_INTERNAL_ERROR,
    MSG_MISSING_TICKET_ID,
    MSG_MISSING_TITLE,
    MSG_NO_TICKETS_PROVIDED,
    MSG_NO_UPDATES_PROVIDED,
    MSG_TICKET_NOT_FOUND,
    MSG_TRANSITION_FAILED,
    MSG_UNKNOWN_METHOD,
    MSG_UNKNOWN_OPERATION,
    MSG_UPDATE_FAILED,
    SERVER_NAME,
    SERVER_VERSION,
    STATUS_COMPLETED,
    STATUS_ERROR,
)
from .dto import (
    CreateEpicRequest,
    CreateIssueRequest,
    CreateTaskRequest,
    CreateTicketRequest,
    ReadTicketRequest,
)
from .response_builder import ResponseBuilder


class MCPTicketServer:
    """MCP server for ticket operations over stdio - synchronous implementation."""

    def __init__(
        self, adapter_type: str = "aitrackdown", config: dict[str, Any] | None = None
    ):
        """Initialize MCP server.

        Args:
        ----
            adapter_type: Type of adapter to use
            config: Adapter configuration

        """
        self.adapter_type = adapter_type
        self.adapter_config = config or {"base_path": DEFAULT_BASE_PATH}
        self.adapter = AdapterRegistry.get_adapter(adapter_type, self.adapter_config)
        self.running = False

    async def handle_request(self, request: dict[str, Any]) -> dict[str, Any]:
        """Handle JSON-RPC request.

        Args:
        ----
            request: JSON-RPC request

        Returns:
        -------
            JSON-RPC response

        """
        method = request.get("method")
        params = request.get("params", {})
        request_id = request.get("id")

        try:
            # Handle MCP protocol methods
            if method == "initialize":
                result = await self._handle_initialize(params)
            # Route to ticket operation handlers
            elif method == "ticket/create":
                result = await self._handle_create(params)
            elif method == "ticket/read":
                result = await self._handle_read(params)
            elif method == "ticket/update":
                result = await self._handle_update(params)
            elif method == "ticket/delete":
                result = await self._handle_delete(params)
            elif method == "ticket/list":
                result = await self._handle_list(params)
            elif method == "ticket/search":
                result = await self._handle_search(params)
            elif method == "ticket/transition":
                result = await self._handle_transition(params)
            elif method == "ticket/comment":
                result = await self._handle_comment(params)
            elif method == "ticket/create_pr":
                result = await self._handle_create_pr(params)
            elif method == "ticket/link_pr":
                result = await self._handle_link_pr(params)
            # Hierarchy management tools
            elif method == "epic/create":
                result = await self._handle_epic_create(params)
            elif method == "epic/list":
                result = await self._handle_epic_list(params)
            elif method == "epic/issues":
                result = await self._handle_epic_issues(params)
            elif method == "issue/create":
                result = await self._handle_issue_create(params)
            elif method == "issue/tasks":
                result = await self._handle_issue_tasks(params)
            elif method == "task/create":
                result = await self._handle_task_create(params)
            elif method == "hierarchy/tree":
                result = await self._handle_hierarchy_tree(params)
            # Bulk operations
            elif method == "ticket/bulk_create":
                result = await self._handle_bulk_create(params)
            elif method == "ticket/bulk_update":
                result = await self._handle_bulk_update(params)
            # Advanced search
            elif method == "ticket/search_hierarchy":
                result = await self._handle_search_hierarchy(params)
            # Attachment handling
            elif method == "ticket/attach":
                result = await self._handle_attach(params)
            elif method == "ticket/attachments":
                result = await self._handle_list_attachments(params)
            elif method == "tools/list":
                result = await self._handle_tools_list()
            elif method == "tools/call":
                result = await self._handle_tools_call(params)
            else:
                return ResponseBuilder.error(
                    request_id,
                    ERROR_METHOD_NOT_FOUND,
                    MSG_UNKNOWN_METHOD.format(method=method),
                )

            return {"jsonrpc": JSONRPC_VERSION, "result": result, "id": request_id}

        except Exception as e:
            return ResponseBuilder.error(
                request_id, ERROR_INTERNAL, MSG_INTERNAL_ERROR.format(error=str(e))
            )

    def _error_response(
        self, request_id: Any, code: int, message: str
    ) -> dict[str, Any]:
        """Create error response.

        Args:
        ----
            request_id: Request ID
            code: Error code
            message: Error message

        Returns:
        -------
            Error response

        """
        return {
            "jsonrpc": "2.0",
            "error": {"code": code, "message": message},
            "id": request_id,
        }

    async def _handle_create(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle task creation - SYNCHRONOUS with validation."""
        # Validate and parse request
        request = CreateTicketRequest(**params)

        # Build task from validated DTO
        task = Task(  # type: ignore[call-arg]
            title=request.title,
            description=request.description,
            priority=Priority(request.priority),
            tags=request.tags,
            assignee=request.assignee,
        )

        # Create directly
        created = await self.adapter.create(task)

        # Return immediately
        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(created)
        )

    async def _handle_read(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle ticket read - SYNCHRONOUS with validation."""
        # Validate and parse request
        request = ReadTicketRequest(**params)

        ticket = await self.adapter.read(request.ticket_id)

        if ticket is None:
            return ResponseBuilder.status_result(
                STATUS_ERROR,
                error=MSG_TICKET_NOT_FOUND.format(ticket_id=request.ticket_id),
            )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(ticket)
        )

    async def _handle_update(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle ticket update - SYNCHRONOUS."""
        ticket_id = params["ticket_id"]

        # Support both formats: {"ticket_id": "x", "updates": {...}} and {"ticket_id": "x", "field": "value"}
        if "updates" in params:
            updates = params["updates"]
        else:
            # Extract all non-ticket_id fields as updates
            updates = {k: v for k, v in params.items() if k != "ticket_id"}

        updated = await self.adapter.update(ticket_id, updates)

        if updated is None:
            return ResponseBuilder.status_result(
                STATUS_ERROR, error=MSG_UPDATE_FAILED.format(ticket_id=ticket_id)
            )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(updated)
        )

    async def _handle_delete(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle ticket deletion - SYNCHRONOUS."""
        ticket_id = params["ticket_id"]
        success = await self.adapter.delete(ticket_id)

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.deletion_result(ticket_id, success)
        )

    async def _handle_list(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle ticket listing - SYNCHRONOUS."""
        tickets = await self.adapter.list(
            limit=params.get("limit", DEFAULT_LIMIT),
            offset=params.get("offset", DEFAULT_OFFSET),
            filters=params.get("filters"),
        )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.tickets_result(tickets)
        )

    async def _handle_search(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle ticket search - SYNCHRONOUS."""
        query = SearchQuery(  # type: ignore[call-arg]
            query=params.get("query"),
            state=TicketState(params["state"]) if params.get("state") else None,
            priority=Priority(params["priority"]) if params.get("priority") else None,
            assignee=params.get("assignee"),
            tags=params.get("tags"),
            limit=params.get("limit", DEFAULT_LIMIT),
        )

        results = await self.adapter.search(query)

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.tickets_result(results)
        )

    async def _handle_transition(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle state transition - SYNCHRONOUS."""
        ticket_id = params["ticket_id"]
        target_state = TicketState(params["target_state"])

        updated = await self.adapter.transition_state(ticket_id, target_state)

        if updated is None:
            return ResponseBuilder.status_result(
                STATUS_ERROR, error=MSG_TRANSITION_FAILED.format(ticket_id=ticket_id)
            )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(updated)
        )

    async def _handle_comment(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle comment operations - SYNCHRONOUS."""
        operation = params.get("operation", "add")

        if operation == "add":
            comment = Comment(  # type: ignore[call-arg]
                ticket_id=params["ticket_id"],
                content=params["content"],
                author=params.get("author"),
            )

            created = await self.adapter.add_comment(comment)

            return ResponseBuilder.status_result(
                STATUS_COMPLETED, **ResponseBuilder.comment_result(created)
            )

        elif operation == "list":
            comments = await self.adapter.get_comments(
                params["ticket_id"],
                limit=params.get("limit", DEFAULT_LIMIT),
                offset=params.get("offset", DEFAULT_OFFSET),
            )

            return ResponseBuilder.status_result(
                STATUS_COMPLETED, **ResponseBuilder.comments_result(comments)
            )

        else:
            raise ValueError(MSG_UNKNOWN_OPERATION.format(operation=operation))

    # Hierarchy Management Handlers

    async def _handle_epic_create(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle epic creation - SYNCHRONOUS with validation."""
        # Validate and parse request
        request = CreateEpicRequest(**params)

        # Build epic from validated DTO
        metadata: dict[str, Any] = {}
        if request.target_date:
            metadata["target_date"] = request.target_date
        if request.lead_id:
            metadata["lead_id"] = request.lead_id

        epic = Epic(  # type: ignore[call-arg]
            title=request.title,
            description=request.description,
            child_issues=request.child_issues,
            metadata=metadata,
        )

        # Create directly
        created = await self.adapter.create(epic)

        # Return immediately
        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(created)
        )

    async def _handle_epic_list(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle epic listing - SYNCHRONOUS."""
        epics = await self.adapter.list_epics(
            limit=params.get("limit", DEFAULT_LIMIT),
            offset=params.get("offset", DEFAULT_OFFSET),
            **{k: v for k, v in params.items() if k not in ["limit", "offset"]},
        )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.epics_result(epics)
        )

    async def _handle_epic_issues(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle listing issues in an epic - SYNCHRONOUS."""
        epic_id = params["epic_id"]
        issues = await self.adapter.list_issues_by_epic(epic_id)

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.issues_result(issues)
        )

    async def _handle_issue_create(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle issue creation - SYNCHRONOUS with validation.

        Note: In the current model, 'issues' are Tasks with a parent epic.
        """
        # Validate and parse request
        request = CreateIssueRequest(**params)

        # Build task (issue) from validated DTO
        task = Task(  # type: ignore[call-arg]
            title=request.title,
            description=request.description,
            parent_epic=request.epic_id,  # Issues are tasks under epics
            priority=Priority(request.priority),
            assignee=request.assignee,
            tags=request.tags,
            estimated_hours=request.estimated_hours,
        )

        # Create directly
        created = await self.adapter.create(task)

        # Return immediately
        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(created)
        )

    async def _handle_issue_tasks(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle listing tasks in an issue - SYNCHRONOUS."""
        issue_id = params["issue_id"]
        tasks = await self.adapter.list_tasks_by_issue(issue_id)

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.tasks_result(tasks)
        )

    async def _handle_task_create(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle task creation - SYNCHRONOUS with validation."""
        # Validate and parse request (will raise ValidationError if parent_id missing)
        request = CreateTaskRequest(**params)

        # Build task from validated DTO
        task = Task(  # type: ignore[call-arg]
            title=request.title,
            parent_issue=request.parent_id,
            description=request.description,
            priority=Priority(request.priority),
            assignee=request.assignee,
            tags=request.tags,
            estimated_hours=request.estimated_hours,
        )

        # Create directly
        created = await self.adapter.create(task)

        # Return immediately
        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.ticket_result(created)
        )

    async def _handle_hierarchy_tree(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle hierarchy tree visualization - SYNCHRONOUS."""
        epic_id = params.get("epic_id")
        max_depth = params.get("max_depth", DEFAULT_MAX_DEPTH)

        if epic_id:
            # Get specific epic tree
            epic = await self.adapter.get_epic(epic_id)
            if not epic:
                return ResponseBuilder.status_result(
                    STATUS_ERROR, error=MSG_EPIC_NOT_FOUND.format(epic_id=epic_id)
                )

            # Build tree structure
            tree: dict[str, Any] = {"epic": epic.model_dump(), "issues": []}

            # Get issues in epic if depth allows (depth 1 = epic only, depth 2+ = issues)
            if max_depth > 1:
                issues = await self.adapter.list_issues_by_epic(epic_id)
                for issue in issues:
                    issue_node: dict[str, Any] = {
                        "issue": issue.model_dump(),
                        "tasks": [],
                    }

                    # Get tasks in issue if depth allows (depth 3+ = tasks)
                    if max_depth > 2 and issue.id:
                        tasks = await self.adapter.list_tasks_by_issue(issue.id)
                        issue_node["tasks"] = [task.model_dump() for task in tasks]

                    tree["issues"].append(issue_node)

            return ResponseBuilder.status_result(STATUS_COMPLETED, **tree)
        else:
            # Get all epics with their hierarchies
            epics = await self.adapter.list_epics(
                limit=params.get("limit", DEFAULT_LIMIT)
            )
            trees = []

            for epic in epics:
                tree = await self._handle_hierarchy_tree(
                    {"epic_id": epic.id, "max_depth": max_depth}
                )
                trees.append(tree)

            return ResponseBuilder.status_result(STATUS_COMPLETED, trees=trees)

    async def _handle_bulk_create(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle bulk ticket creation - SYNCHRONOUS."""
        tickets = params.get("tickets", [])
        if not tickets:
            return ResponseBuilder.status_result(
                STATUS_ERROR, error=MSG_NO_TICKETS_PROVIDED
            )

        results = []
        for i, ticket_data in enumerate(tickets):
            if not ticket_data.get("title"):
                return ResponseBuilder.status_result(
                    STATUS_ERROR, error=MSG_MISSING_TITLE.format(index=i)
                )

            try:
                # Create ticket based on operation type
                operation = ticket_data.get("operation", "create")

                if operation == "create_epic":
                    result = await self._handle_epic_create(ticket_data)
                elif operation == "create_issue":
                    result = await self._handle_issue_create(ticket_data)
                elif operation == "create_task":
                    result = await self._handle_task_create(ticket_data)
                else:
                    result = await self._handle_create(ticket_data)

                results.append(result)
            except Exception as e:
                results.append(
                    ResponseBuilder.status_result(
                        STATUS_ERROR, error=str(e), ticket_index=i
                    )
                )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.bulk_result(results)
        )

    async def _handle_bulk_update(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle bulk ticket updates - SYNCHRONOUS."""
        updates = params.get("updates", [])
        if not updates:
            return ResponseBuilder.status_result(
                STATUS_ERROR, error=MSG_NO_UPDATES_PROVIDED
            )

        results = []
        for i, update_data in enumerate(updates):
            if not update_data.get("ticket_id"):
                return ResponseBuilder.status_result(
                    STATUS_ERROR, error=MSG_MISSING_TICKET_ID.format(index=i)
                )

            try:
                result = await self._handle_update(update_data)
                results.append(result)
            except Exception as e:
                results.append(
                    ResponseBuilder.status_result(
                        STATUS_ERROR, error=str(e), ticket_id=update_data["ticket_id"]
                    )
                )

        return ResponseBuilder.status_result(
            STATUS_COMPLETED, **ResponseBuilder.bulk_result(results)
        )

    async def _handle_search_hierarchy(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle hierarchy-aware search - SYNCHRONOUS."""
        query = params.get("query", "")
        include_children = params.get("include_children", True)
        include_parents = params.get("include_parents", True)

        # Perform basic search
        search_query = SearchQuery(  # type: ignore[call-arg]
            query=query,
            state=TicketState(params["state"]) if params.get("state") else None,
            priority=Priority(params["priority"]) if params.get("priority") else None,
            limit=params.get("limit", 50),
        )

        tickets = await self.adapter.search(search_query)

        # Enhance with hierarchy information
        enhanced_results = []
        for ticket in tickets:
            result = {"ticket": ticket.model_dump(), "hierarchy": {}}

            # Add parent information
            if include_parents:
                if hasattr(ticket, "parent_epic") and ticket.parent_epic:
                    parent_epic = await self.adapter.get_epic(ticket.parent_epic)
                    if parent_epic:
                        result["hierarchy"]["epic"] = parent_epic.model_dump()

                if hasattr(ticket, "parent_issue") and ticket.parent_issue:
                    parent_issue = await self.adapter.read(ticket.parent_issue)
                    if parent_issue:
                        result["hierarchy"]["parent_issue"] = parent_issue.model_dump()

            # Add children information
            if include_children:
                if ticket.ticket_type == "epic":
                    issues = await self.adapter.list_issues_by_epic(ticket.id)
                    result["hierarchy"]["issues"] = [
                        issue.model_dump() for issue in issues
                    ]
                elif ticket.ticket_type == "issue":
                    tasks = await self.adapter.list_tasks_by_issue(ticket.id)
                    result["hierarchy"]["tasks"] = [task.model_dump() for task in tasks]

            enhanced_results.append(result)

        return {
            "status": "completed",
            "results": enhanced_results,
            "count": len(enhanced_results),
            "query": query,
        }

    async def _handle_attach(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle file attachment to ticket."""
        # Note: This is a placeholder for attachment functionality
        # Most adapters don't support file attachments directly
        return {
            "status": "not_implemented",
            "error": "Attachment functionality not yet implemented",
            "ticket_id": params.get("ticket_id"),
            "details": {
                "reason": "File attachments require adapter-specific implementation",
                "alternatives": [
                    "Add file URLs in comments",
                    "Use external file storage",
                ],
            },
        }

    async def _handle_list_attachments(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle listing ticket attachments."""
        # Note: This is a placeholder for attachment functionality
        return {"status": "completed", "attachments": []}

    async def _handle_create_pr(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle PR creation for a ticket."""
        ticket_id = params.get("ticket_id")
        if not ticket_id:
            raise ValueError("ticket_id is required")

        # Check if adapter supports PR creation
        adapter_name = self.adapter.__class__.__name__.lower()

        if "github" in adapter_name:
            # GitHub adapter supports direct PR creation
            from ..adapters.github import GitHubAdapter

            if isinstance(self.adapter, GitHubAdapter):
                try:
                    result = await self.adapter.create_pull_request(
                        ticket_id=ticket_id,
                        base_branch=params.get("base_branch", "main"),
                        head_branch=params.get("head_branch"),
                        title=params.get("title"),
                        body=params.get("body"),
                        draft=params.get("draft", False),
                    )
                    return {
                        "success": True,
                        "pr_number": result.get("number"),
                        "pr_url": result.get("url"),
                        "branch": result.get("branch"),
                        "linked_issue": result.get("linked_issue"),
                        "message": f"Pull request created successfully: {result.get('url')}",
                    }
                except Exception as e:
                    return {
                        "success": False,
                        "error": str(e),
                        "ticket_id": ticket_id,
                    }
            # Fallback if not GitHub adapter instance
            return {
                "success": False,
                "error": "GitHub adapter not properly initialized",
                "ticket_id": ticket_id,
            }
        elif "linear" in adapter_name:
            # Linear adapter needs GitHub config for PR creation
            from ..adapters.linear import LinearAdapter

            if isinstance(self.adapter, LinearAdapter):
                # For Linear, we prepare the branch and metadata but can't create the actual PR
                # without GitHub integration configured
                try:
                    github_config = {
                        "owner": params.get("github_owner"),
                        "repo": params.get("github_repo"),
                        "base_branch": params.get("base_branch", "main"),
                        "head_branch": params.get("head_branch"),
                    }

                    # Validate GitHub config for Linear
                    if not github_config.get("owner") or not github_config.get("repo"):
                        return {
                            "success": False,
                            "error": "GitHub owner and repo are required for Linear PR creation",
                            "ticket_id": ticket_id,
                        }

                    result = await self.adapter.create_pull_request_for_issue(
                        ticket_id=ticket_id,
                        github_config=github_config,
                    )
                    return {
                        "success": True,
                        "branch_name": result.get("branch_name"),
                        "ticket_id": ticket_id,
                        "message": result.get("message"),
                        "github_config": {
                            "owner": result.get("github_owner"),
                            "repo": result.get("github_repo"),
                            "base_branch": result.get("base_branch"),
                        },
                    }
                except Exception as e:
                    return {
                        "success": False,
                        "error": str(e),
                        "ticket_id": ticket_id,
                    }
            # Fallback if not Linear adapter instance
            return {
                "success": False,
                "error": "Linear adapter not properly initialized",
                "ticket_id": ticket_id,
            }
        else:
            return {
                "success": False,
                "error": f"PR creation not supported for adapter: {adapter_name}",
                "ticket_id": ticket_id,
            }

    async def _handle_link_pr(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle linking an existing PR to a ticket."""
        ticket_id = params.get("ticket_id")
        pr_url = params.get("pr_url")

        if not ticket_id:
            raise ValueError("ticket_id is required")
        if not pr_url:
            raise ValueError("pr_url is required")

        adapter_name = self.adapter.__class__.__name__.lower()

        if "github" in adapter_name:
            from ..adapters.github import GitHubAdapter

            if isinstance(self.adapter, GitHubAdapter):
                try:
                    result: dict[str, Any] = (
                        await self.adapter.link_existing_pull_request(
                            ticket_id=ticket_id,
                            pr_url=pr_url,
                        )
                    )
                    return result
                except Exception as e:
                    return {
                        "success": False,
                        "error": str(e),
                        "ticket_id": ticket_id,
                        "pr_url": pr_url,
                    }
            # Fallback if not GitHub adapter instance
            return {
                "success": False,
                "error": "GitHub adapter not properly initialized",
                "ticket_id": ticket_id,
                "pr_url": pr_url,
            }
        elif "linear" in adapter_name:
            from ..adapters.linear import LinearAdapter

            if isinstance(self.adapter, LinearAdapter):
                try:
                    link_result: dict[str, Any] = (
                        await self.adapter.link_to_pull_request(
                            ticket_id=ticket_id,
                            pr_url=pr_url,
                        )
                    )
                    return link_result
                except Exception as e:
                    return {
                        "success": False,
                        "error": str(e),
                        "ticket_id": ticket_id,
                        "pr_url": pr_url,
                    }
            # Fallback if not Linear adapter instance
            return {
                "success": False,
                "error": "Linear adapter not properly initialized",
                "ticket_id": ticket_id,
                "pr_url": pr_url,
            }
        else:
            return {
                "success": False,
                "error": f"PR linking not supported for adapter: {adapter_name}",
                "ticket_id": ticket_id,
                "pr_url": pr_url,
            }

    async def _handle_initialize(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle initialize request from MCP client.

        Args:
        ----
            params: Initialize parameters

        Returns:
        -------
            Server capabilities

        """
        return {
            "protocolVersion": MCP_PROTOCOL_VERSION,
            "serverInfo": {"name": SERVER_NAME, "version": SERVER_VERSION},
            "capabilities": {"tools": {"listChanged": False}},
        }

    async def _handle_tools_list(self) -> dict[str, Any]:
        """List available MCP tools."""
        return {
            "tools": [
                # Hierarchy Management Tools
                {
                    "name": "epic_create",
                    "description": "Create a new epic (top-level project/milestone)",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "title": {"type": "string", "description": "Epic title"},
                            "description": {
                                "type": "string",
                                "description": "Epic description",
                            },
                            "target_date": {
                                "type": "string",
                                "description": "Target completion date (ISO format)",
                            },
                            "lead_id": {
                                "type": "string",
                                "description": "Epic lead/owner ID",
                            },
                            "child_issues": {
                                "type": "array",
                                "items": {"type": "string"},
                                "description": "Initial child issue IDs",
                            },
                        },
                        "required": ["title"],
                    },
                },
                {
                    "name": "epic_list",
                    "description": "List all epics",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "limit": {
                                "type": "integer",
                                "default": 10,
                                "description": "Maximum number of epics to return",
                            },
                            "offset": {
                                "type": "integer",
                                "default": 0,
                                "description": "Number of epics to skip",
                            },
                        },
                    },
                },
                # ... (rest of the tools list)
                {
                    "name": "ticket_create",
                    "description": "Create a new ticket",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "title": {"type": "string", "description": "Ticket title"},
                            "description": {
                                "type": "string",
                                "description": "Description",
                            },
                            "priority": {
                                "type": "string",
                                "enum": ["low", "medium", "high", "critical"],
                            },
                            "tags": {"type": "array", "items": {"type": "string"}},
                            "assignee": {"type": "string"},
                        },
                        "required": ["title"],
                    },
                },
                {
                    "name": "ticket_comment",
                    "description": "Add or list comments on a ticket",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "operation": {
                                "type": "string",
                                "enum": ["add", "list"],
                                "description": "Operation to perform: 'add' to create a comment, 'list' to retrieve comments",
                                "default": "add",
                            },
                            "ticket_id": {
                                "type": "string",
                                "description": "Ticket ID to comment on",
                            },
                            "content": {
                                "type": "string",
                                "description": "Comment content (required for 'add' operation)",
                            },
                            "author": {
                                "type": "string",
                                "description": "Comment author (optional for 'add' operation)",
                            },
                            "limit": {
                                "type": "integer",
                                "default": 10,
                                "description": "Maximum number of comments to return (for 'list' operation)",
                            },
                            "offset": {
                                "type": "integer",
                                "default": 0,
                                "description": "Number of comments to skip (for 'list' operation)",
                            },
                        },
                        "required": ["ticket_id"],
                    },
                },
            ]
        }

    async def _handle_tools_call(self, params: dict[str, Any]) -> dict[str, Any]:
        """Handle tool invocation from MCP client.

        Args:
        ----
            params: Contains 'name' and 'arguments' fields

        Returns:
        -------
            MCP formatted response with content array

        """
        tool_name = params.get("name")
        arguments = params.get("arguments", {})

        try:
            # Route to appropriate handler based on tool name
            # Hierarchy management tools
            if tool_name == "epic_create":
                result = await self._handle_epic_create(arguments)
            elif tool_name == "epic_list":
                result = await self._handle_epic_list(arguments)
            elif tool_name == "epic_issues":
                result = await self._handle_epic_issues(arguments)
            elif tool_name == "issue_create":
                result = await self._handle_issue_create(arguments)
            elif tool_name == "issue_tasks":
                result = await self._handle_issue_tasks(arguments)
            elif tool_name == "task_create":
                result = await self._handle_task_create(arguments)
            elif tool_name == "hierarchy_tree":
                result = await self._handle_hierarchy_tree(arguments)
            # Bulk operations
            elif tool_name == "ticket_bulk_create":
                result = await self._handle_bulk_create(arguments)
            elif tool_name == "ticket_bulk_update":
                result = await self._handle_bulk_update(arguments)
            # Advanced search
            elif tool_name == "ticket_search_hierarchy":
                result = await self._handle_search_hierarchy(arguments)
            # Standard ticket operations
            elif tool_name == "ticket_create":
                result = await self._handle_create(arguments)
            elif tool_name == "ticket_list":
                result = await self._handle_list(arguments)
            elif tool_name == "ticket_update":
                result = await self._handle_update(arguments)
            elif tool_name == "ticket_transition":
                result = await self._handle_transition(arguments)
            elif tool_name == "ticket_search":
                result = await self._handle_search(arguments)
            elif tool_name == "ticket_comment":
                result = await self._handle_comment(arguments)
            # PR integration
            elif tool_name == "ticket_create_pr":
                result = await self._handle_create_pr(arguments)
            elif tool_name == "ticket_link_pr":
                result = await self._handle_link_pr(arguments)
            else:
                return {
                    "content": [{"type": "text", "text": f"Unknown tool: {tool_name}"}],
                    "isError": True,
                }

            # Format successful response in MCP content format
            # Handle different response types
            if isinstance(result, list):
                # For list operations, convert Pydantic models to dicts
                result_text = json.dumps(result, indent=2, default=str)
            elif isinstance(result, dict):
                # For dict responses (create, update, etc.)
                result_text = json.dumps(result, indent=2, default=str)
            else:
                result_text = str(result)

            return {
                "content": [{"type": "text", "text": result_text}],
                "isError": False,
            }

        except Exception as e:
            # Format error response
            return {
                "content": [
                    {
                        "type": "text",
                        "text": f"Error calling tool {tool_name}: {str(e)}",
                    }
                ],
                "isError": True,
            }

    async def run(self) -> None:
        """Run the MCP server, reading from stdin and writing to stdout."""
        self.running = True

        try:
            reader = asyncio.StreamReader()
            protocol = asyncio.StreamReaderProtocol(reader)
            await asyncio.get_event_loop().connect_read_pipe(
                lambda: protocol, sys.stdin
            )
        except Exception as e:
            sys.stderr.write(f"Failed to connect to stdin: {str(e)}\n")
            return

        # Main message loop
        while self.running:
            try:
                line = await reader.readline()
                if not line:
                    # EOF reached, exit gracefully
                    sys.stderr.write("EOF reached, shutting down server\n")
                    break

                # Parse JSON-RPC request
                request = json.loads(line.decode())

                # Handle request
                response = await self.handle_request(request)

                # Send response
                sys.stdout.write(json.dumps(response) + "\n")
                sys.stdout.flush()

            except json.JSONDecodeError as e:
                error_response = ResponseBuilder.error(
                    None, ERROR_PARSE, f"Parse error: {str(e)}"
                )
                sys.stdout.write(json.dumps(error_response) + "\n")
                sys.stdout.flush()

            except KeyboardInterrupt:
                sys.stderr.write("Received interrupt signal\n")
                break

            except BrokenPipeError:
                sys.stderr.write("Connection closed by client\n")
                break

            except Exception as e:
                # Log error but continue running
                sys.stderr.write(f"Error: {str(e)}\n")

    async def stop(self) -> None:
        """Stop the server."""
        self.running = False
        await self.adapter.close()


async def main() -> None:
    """Run main entry point for MCP server - kept for backward compatibility.

    This function is maintained in case it's being called directly,
    but the preferred way is now through the CLI: `mcp-ticketer mcp`

    SECURITY: This method ONLY reads from the current project directory
    to prevent configuration leakage across projects. It will NEVER read
    from user home directory or system-wide locations.
    """
    # Load configuration
    import json
    import logging

    logger = logging.getLogger(__name__)

    # Load environment variables AFTER working directory has been set by __main__.py
    # This ensures we load .env files from the target project directory, not from where the command is executed
    env_local_file = Path.cwd() / ".env.local"
    if env_local_file.exists():
        load_dotenv(env_local_file, override=True)
        sys.stderr.write(f"[MCP Server] Loaded environment from: {env_local_file}\n")
        logger.debug(f"Loaded environment from: {env_local_file}")
    else:
        # Fall back to .env
        env_file = Path.cwd() / ".env"
        if env_file.exists():
            load_dotenv(env_file, override=True)
            sys.stderr.write(f"[MCP Server] Loaded environment from: {env_file}\n")
            logger.debug(f"Loaded environment from: {env_file}")
        else:
            # Try default dotenv loading (searches upward)
            load_dotenv(override=True)
            sys.stderr.write(
                "[MCP Server] Loaded environment from default search path\n"
            )
            logger.debug("Loaded environment from default search path")

    # Initialize defaults
    adapter_type = "aitrackdown"
    adapter_config = {"base_path": DEFAULT_BASE_PATH}

    # Priority 1: Check project-local config file (highest priority)
    config_file = Path.cwd() / ".mcp-ticketer" / "config.json"
    config_loaded = False

    if config_file.exists():
        # Validate config is within project
        try:
            if not config_file.resolve().is_relative_to(Path.cwd().resolve()):
                logger.error(
                    f"Security violation: Config file {config_file} "
                    "is not within project directory"
                )
                raise ValueError(
                    f"Security violation: Config file {config_file} "
                    "is not within project directory"
                )
        except (ValueError, RuntimeError):
            # is_relative_to may raise ValueError in some cases
            pass

        try:
            with open(config_file) as f:
                config = json.load(f)
                adapter_type = config.get("default_adapter", "aitrackdown")
                # Get adapter-specific config
                adapters_config = config.get("adapters", {})
                adapter_config = adapters_config.get(adapter_type, {})
                # Fallback to legacy config format
                if not adapter_config and "config" in config:
                    adapter_config = config["config"]
                config_loaded = True
                logger.info(
                    f"Loaded MCP configuration from project-local: {config_file}"
                )
                sys.stderr.write(
                    f"[MCP Server] Using adapter from config: {adapter_type}\n"
                )
        except (OSError, json.JSONDecodeError) as e:
            logger.warning(f"Could not load project config: {e}, will try .env files")

    # Priority 2: Check .env files (only if no config file found)
    if not config_loaded:
        env_config = _load_env_configuration()
        if env_config and env_config.get("adapter_type"):
            adapter_type = env_config["adapter_type"]
            adapter_config = env_config["adapter_config"]
            config_loaded = True
            logger.info(f"Using adapter from .env files: {adapter_type}")
            logger.info(
                f"Built adapter config from .env: {list(adapter_config.keys())}"
            )
            sys.stderr.write(f"[MCP Server] Using adapter from .env: {adapter_type}\n")

    # Priority 3: Default to aitrackdown
    if not config_loaded:
        logger.info("No configuration found, defaulting to aitrackdown adapter")
        sys.stderr.write("[MCP Server] No config found, using default: aitrackdown\n")
        adapter_type = "aitrackdown"
        adapter_config = {"base_path": DEFAULT_BASE_PATH}

    # Log final configuration for debugging
    logger.info(f"Starting MCP server with adapter: {adapter_type}")
    logger.debug(f"Adapter config keys: {list(adapter_config.keys())}")

    # Create and run server
    server = MCPTicketServer(adapter_type, adapter_config)
    await server.run()


def _load_env_configuration() -> dict[str, Any] | None:
    """Load adapter configuration from environment variables and .env files.

    Priority order (highest to lowest):
    1. os.environ (set by MCP clients like Claude Desktop)
    2. .env.local file (local overrides)
    3. .env file (default configuration)

    Returns:
    -------
        Dictionary with 'adapter_type' and 'adapter_config' keys, or None if no config found

    """
    import os

    env_vars = {}

    # Priority 1: Check process environment variables (set by MCP client)
    # This allows Claude Desktop and other MCP clients to configure the adapter
    relevant_env_keys = [
        "MCP_TICKETER_ADAPTER",
        "LINEAR_API_KEY",
        "LINEAR_TEAM_ID",
        "LINEAR_TEAM_KEY",
        "LINEAR_API_URL",
        "JIRA_SERVER",
        "JIRA_EMAIL",
        "JIRA_API_TOKEN",
        "JIRA_PROJECT_KEY",
        "GITHUB_TOKEN",
        "GITHUB_OWNER",
        "GITHUB_REPO",
        "MCP_TICKETER_BASE_PATH",
    ]

    for key in relevant_env_keys:
        if os.environ.get(key):
            env_vars[key] = os.environ[key]

    # Priority 2: Check .env files (only for keys not already set)
    # This allows .env files to provide fallback values
    env_files = [".env.local", ".env"]

    for env_file in env_files:
        env_path = Path.cwd() / env_file
        if env_path.exists():
            try:
                # Parse .env file manually to avoid external dependencies
                with open(env_path) as f:
                    for line in f:
                        line = line.strip()
                        if line and not line.startswith("#") and "=" in line:
                            key, value = line.split("=", 1)
                            key = key.strip()
                            value = value.strip().strip('"').strip("'")

                            # Only set if not already in env_vars (os.environ takes priority)
                            if key not in env_vars and value:
                                env_vars[key] = value
            except Exception:
                continue

    if not env_vars:
        return None

    # Determine adapter type and build config
    adapter_type = env_vars.get("MCP_TICKETER_ADAPTER")
    if not adapter_type:
        # Auto-detect based on available keys
        if any(key.startswith("LINEAR_") for key in env_vars):
            adapter_type = "linear"
        elif any(key.startswith("GITHUB_") for key in env_vars):
            adapter_type = "github"
        elif any(key.startswith("JIRA_") for key in env_vars):
            adapter_type = "jira"
        else:
            return None

    # Build adapter-specific configuration
    adapter_config = _build_adapter_config_from_env_vars(adapter_type, env_vars)

    if not adapter_config:
        return None

    return {"adapter_type": adapter_type, "adapter_config": adapter_config}


def _build_adapter_config_from_env_vars(
    adapter_type: str, env_vars: dict[str, str]
) -> dict[str, Any]:
    """Build adapter configuration from parsed environment variables.

    Args:
    ----
        adapter_type: Type of adapter to configure
        env_vars: Dictionary of environment variables from .env files

    Returns:
    -------
        Dictionary of adapter configuration

    """
    config: dict[str, Any] = {}

    if adapter_type == "linear":
        # Linear adapter configuration
        if env_vars.get("LINEAR_API_KEY"):
            config["api_key"] = env_vars["LINEAR_API_KEY"]
        if env_vars.get("LINEAR_TEAM_ID"):
            config["team_id"] = env_vars["LINEAR_TEAM_ID"]
        if env_vars.get("LINEAR_TEAM_KEY"):
            config["team_key"] = env_vars["LINEAR_TEAM_KEY"]
        if env_vars.get("LINEAR_API_URL"):
            config["api_url"] = env_vars["LINEAR_API_URL"]

    elif adapter_type == "github":
        # GitHub adapter configuration
        if env_vars.get("GITHUB_TOKEN"):
            config["token"] = env_vars["GITHUB_TOKEN"]
        if env_vars.get("GITHUB_OWNER"):
            config["owner"] = env_vars["GITHUB_OWNER"]
        if env_vars.get("GITHUB_REPO"):
            config["repo"] = env_vars["GITHUB_REPO"]

    elif adapter_type == "jira":
        # JIRA adapter configuration
        if env_vars.get("JIRA_SERVER"):
            config["server"] = env_vars["JIRA_SERVER"]
        if env_vars.get("JIRA_EMAIL"):
            config["email"] = env_vars["JIRA_EMAIL"]
        if env_vars.get("JIRA_API_TOKEN"):
            config["api_token"] = env_vars["JIRA_API_TOKEN"]
        if env_vars.get("JIRA_PROJECT_KEY"):
            config["project_key"] = env_vars["JIRA_PROJECT_KEY"]

    elif adapter_type == "aitrackdown":
        # AITrackdown adapter configuration
        base_path = env_vars.get("MCP_TICKETER_BASE_PATH", DEFAULT_BASE_PATH)
        config["base_path"] = base_path
        config["auto_create_dirs"] = True

    # Add any generic overrides
    if env_vars.get("MCP_TICKETER_API_KEY"):
        config["api_key"] = env_vars["MCP_TICKETER_API_KEY"]

    return config


if __name__ == "__main__":
    asyncio.run(main())