mcp-ticketer 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic. Click here for more details.
- mcp_ticketer/__init__.py +27 -0
- mcp_ticketer/__version__.py +40 -0
- mcp_ticketer/adapters/__init__.py +8 -0
- mcp_ticketer/adapters/aitrackdown.py +396 -0
- mcp_ticketer/adapters/github.py +974 -0
- mcp_ticketer/adapters/jira.py +831 -0
- mcp_ticketer/adapters/linear.py +1355 -0
- mcp_ticketer/cache/__init__.py +5 -0
- mcp_ticketer/cache/memory.py +193 -0
- mcp_ticketer/cli/__init__.py +5 -0
- mcp_ticketer/cli/main.py +812 -0
- mcp_ticketer/cli/queue_commands.py +285 -0
- mcp_ticketer/cli/utils.py +523 -0
- mcp_ticketer/core/__init__.py +15 -0
- mcp_ticketer/core/adapter.py +211 -0
- mcp_ticketer/core/config.py +403 -0
- mcp_ticketer/core/http_client.py +430 -0
- mcp_ticketer/core/mappers.py +492 -0
- mcp_ticketer/core/models.py +111 -0
- mcp_ticketer/core/registry.py +128 -0
- mcp_ticketer/mcp/__init__.py +5 -0
- mcp_ticketer/mcp/server.py +459 -0
- mcp_ticketer/py.typed +0 -0
- mcp_ticketer/queue/__init__.py +7 -0
- mcp_ticketer/queue/__main__.py +6 -0
- mcp_ticketer/queue/manager.py +261 -0
- mcp_ticketer/queue/queue.py +357 -0
- mcp_ticketer/queue/run_worker.py +38 -0
- mcp_ticketer/queue/worker.py +425 -0
- mcp_ticketer-0.1.1.dist-info/METADATA +362 -0
- mcp_ticketer-0.1.1.dist-info/RECORD +35 -0
- mcp_ticketer-0.1.1.dist-info/WHEEL +5 -0
- mcp_ticketer-0.1.1.dist-info/entry_points.txt +3 -0
- mcp_ticketer-0.1.1.dist-info/licenses/LICENSE +21 -0
- mcp_ticketer-0.1.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,831 @@
|
|
|
1
|
+
"""JIRA adapter implementation using REST API v3."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import asyncio
|
|
5
|
+
from typing import List, Optional, Dict, Any, Union, Tuple
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from enum import Enum
|
|
8
|
+
import logging
|
|
9
|
+
|
|
10
|
+
import httpx
|
|
11
|
+
from httpx import AsyncClient, HTTPStatusError, TimeoutException
|
|
12
|
+
|
|
13
|
+
from ..core.adapter import BaseAdapter
|
|
14
|
+
from ..core.models import Epic, Task, Comment, SearchQuery, TicketState, Priority
|
|
15
|
+
from ..core.registry import AdapterRegistry
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class JiraIssueType(str, Enum):
    """Common JIRA issue types.

    Values are the display names JIRA expects in the ``issuetype.name``
    field of issue create/update payloads. Custom instances may define
    additional types; these are only the stock defaults.
    """

    EPIC = "Epic"
    STORY = "Story"
    TASK = "Task"
    BUG = "Bug"
    SUBTASK = "Sub-task"
    IMPROVEMENT = "Improvement"
    NEW_FEATURE = "New Feature"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class JiraPriority(str, Enum):
    """Standard JIRA priority levels.

    Values are the default priority names of a stock JIRA instance,
    used in the ``priority.name`` field of issue payloads and in JQL.
    """

    HIGHEST = "Highest"
    HIGH = "High"
    MEDIUM = "Medium"
    LOW = "Low"
    LOWEST = "Lowest"
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
    """Adapter for JIRA using REST API v3."""

    def __init__(self, config: Dict[str, Any]):
        """Initialize JIRA adapter.

        Args:
            config: Configuration with:
                - server: JIRA server URL (e.g., https://company.atlassian.net)
                - email: User email for authentication
                - api_token: API token for authentication
                - project_key: Default project key
                - cloud: Whether this is JIRA Cloud (default: True)
                - verify_ssl: Whether to verify SSL certificates (default: True)
                - timeout: Request timeout in seconds (default: 30)
                - max_retries: Maximum retry attempts (default: 3)

        Raises:
            ValueError: If server, email, or api_token is missing from
                both the config dict and the JIRA_* environment variables.
        """
        super().__init__(config)

        # Configuration: explicit config values win; JIRA_* environment
        # variables are the fallback.
        self.server = config.get("server") or os.getenv("JIRA_SERVER", "")
        self.email = config.get("email") or os.getenv("JIRA_EMAIL", "")
        self.api_token = config.get("api_token") or os.getenv("JIRA_API_TOKEN", "")
        self.project_key = config.get("project_key") or os.getenv("JIRA_PROJECT_KEY", "")
        self.is_cloud = config.get("cloud", True)
        self.verify_ssl = config.get("verify_ssl", True)
        self.timeout = config.get("timeout", 30)
        self.max_retries = config.get("max_retries", 3)

        # Validate required fields before doing any network setup.
        if not all([self.server, self.email, self.api_token]):
            raise ValueError("JIRA adapter requires server, email, and api_token")

        # Clean up server URL (a trailing slash would double up in URLs).
        self.server = self.server.rstrip("/")

        # API base URL: Cloud uses REST v3, Server/Data Center uses v2.
        self.api_base = f"{self.server}/rest/api/3" if self.is_cloud else f"{self.server}/rest/api/2"

        # HTTP client setup: basic auth with email + API token.
        self.auth = httpx.BasicAuth(self.email, self.api_token)
        self.headers = {
            "Accept": "application/json",
            "Content-Type": "application/json"
        }

        # Caches for instance metadata that rarely changes between calls.
        self._workflow_cache: Dict[str, Any] = {}
        self._priority_cache: List[Dict[str, Any]] = []
        self._issue_types_cache: Dict[str, Any] = {}
        self._custom_fields_cache: Dict[str, Any] = {}
|
|
91
|
+
|
|
92
|
+
def _get_state_mapping(self) -> Dict[TicketState, str]:
    """Map universal ticket states onto the JIRA status names most
    commonly used in default workflows."""
    pairs = (
        (TicketState.OPEN, "To Do"),
        (TicketState.IN_PROGRESS, "In Progress"),
        (TicketState.READY, "In Review"),
        (TicketState.TESTED, "Testing"),
        (TicketState.DONE, "Done"),
        (TicketState.WAITING, "Waiting"),
        (TicketState.BLOCKED, "Blocked"),
        (TicketState.CLOSED, "Closed"),
    )
    return dict(pairs)
|
|
104
|
+
|
|
105
|
+
async def _get_client(self) -> AsyncClient:
    """Get configured async HTTP client.

    A fresh client is created per request; callers use it as an async
    context manager so its connection pool is closed after the call.
    """
    return AsyncClient(
        auth=self.auth,
        headers=self.headers,
        timeout=self.timeout,
        verify=self.verify_ssl
    )
|
|
113
|
+
|
|
114
|
+
async def _make_request(
    self,
    method: str,
    endpoint: str,
    data: Optional[Dict[str, Any]] = None,
    params: Optional[Dict[str, Any]] = None,
    retry_count: int = 0
) -> Dict[str, Any]:
    """Make HTTP request to JIRA API with retry logic.

    Args:
        method: HTTP method (GET/POST/PUT/DELETE)
        endpoint: API endpoint. Relative endpoints (e.g. ``"issue"``)
            are resolved against the versioned REST base; endpoints
            starting with ``/rest/`` (e.g. the Agile API) are resolved
            against the server root.
        data: Request body data (sent as JSON)
        params: Query parameters
        retry_count: Current retry attempt (internal)

    Returns:
        Parsed JSON response, or ``{}`` for 204 No Content

    Raises:
        HTTPStatusError: On API errors after retries are exhausted
        TimeoutException: On timeout after retries are exhausted
    """
    # Endpoints like "/rest/agile/1.0/..." live outside the core
    # "/rest/api/{2,3}" base; joining them onto api_base would yield
    # ".../rest/api/3/rest/agile/..." — resolve them from the server
    # root instead.
    if endpoint.startswith("/rest/"):
        url = f"{self.server}{endpoint}"
    else:
        url = f"{self.api_base}/{endpoint.lstrip('/')}"

    async with await self._get_client() as client:
        try:
            response = await client.request(
                method=method,
                url=url,
                json=data,
                params=params
            )
            response.raise_for_status()

            # 204 No Content has no body to decode.
            if response.status_code == 204:
                return {}

            return response.json()

        except TimeoutException as e:
            if retry_count < self.max_retries:
                await asyncio.sleep(2 ** retry_count)  # Exponential backoff
                return await self._make_request(
                    method, endpoint, data, params, retry_count + 1
                )
            raise e

        except HTTPStatusError as e:
            # Honor Retry-After on rate limiting (HTTP 429).
            if e.response.status_code == 429 and retry_count < self.max_retries:
                retry_after = int(e.response.headers.get("Retry-After", 5))
                await asyncio.sleep(retry_after)
                return await self._make_request(
                    method, endpoint, data, params, retry_count + 1
                )

            # Log error details before propagating.
            logger.error(f"JIRA API error: {e.response.status_code} - {e.response.text}")
            raise e
|
|
176
|
+
|
|
177
|
+
async def _get_priorities(self) -> List[Dict[str, Any]]:
    """Return (and lazily cache) the priorities defined on the server."""
    if self._priority_cache:
        return self._priority_cache
    self._priority_cache = await self._make_request("GET", "priority")
    return self._priority_cache
|
|
182
|
+
|
|
183
|
+
async def _get_issue_types(self, project_key: Optional[str] = None) -> List[Dict[str, Any]]:
    """Return (and cache per project key) the issue types of a project."""
    key = project_key or self.project_key
    cached = self._issue_types_cache.get(key)
    if cached is None:
        project_data = await self._make_request("GET", f"project/{key}")
        cached = project_data.get("issueTypes", [])
        self._issue_types_cache[key] = cached
    return cached
|
|
190
|
+
|
|
191
|
+
async def _get_transitions(self, issue_key: str) -> List[Dict[str, Any]]:
    """Fetch the workflow transitions currently available for an issue."""
    payload = await self._make_request("GET", f"issue/{issue_key}/transitions")
    return payload.get("transitions", [])
|
|
195
|
+
|
|
196
|
+
async def _get_custom_fields(self) -> Dict[str, str]:
    """Return (and lazily cache) a name -> field-id map of custom fields."""
    if not self._custom_fields_cache:
        all_fields = await self._make_request("GET", "field")
        mapping: Dict[str, str] = {}
        for field_def in all_fields:
            if field_def.get("custom", False):
                mapping[field_def["name"]] = field_def["id"]
        self._custom_fields_cache = mapping
    return self._custom_fields_cache
|
|
206
|
+
|
|
207
|
+
def _convert_from_adf(self, adf_content: Any) -> str:
|
|
208
|
+
"""Convert Atlassian Document Format (ADF) to plain text.
|
|
209
|
+
|
|
210
|
+
This extracts text content from ADF structure for display.
|
|
211
|
+
"""
|
|
212
|
+
if not adf_content:
|
|
213
|
+
return ""
|
|
214
|
+
|
|
215
|
+
# If it's already a string, return it (JIRA Server)
|
|
216
|
+
if isinstance(adf_content, str):
|
|
217
|
+
return adf_content
|
|
218
|
+
|
|
219
|
+
# Handle ADF structure
|
|
220
|
+
if not isinstance(adf_content, dict):
|
|
221
|
+
return str(adf_content)
|
|
222
|
+
|
|
223
|
+
content_nodes = adf_content.get("content", [])
|
|
224
|
+
lines = []
|
|
225
|
+
|
|
226
|
+
for node in content_nodes:
|
|
227
|
+
if node.get("type") == "paragraph":
|
|
228
|
+
paragraph_text = ""
|
|
229
|
+
for content_item in node.get("content", []):
|
|
230
|
+
if content_item.get("type") == "text":
|
|
231
|
+
paragraph_text += content_item.get("text", "")
|
|
232
|
+
lines.append(paragraph_text)
|
|
233
|
+
elif node.get("type") == "heading":
|
|
234
|
+
heading_text = ""
|
|
235
|
+
for content_item in node.get("content", []):
|
|
236
|
+
if content_item.get("type") == "text":
|
|
237
|
+
heading_text += content_item.get("text", "")
|
|
238
|
+
lines.append(heading_text)
|
|
239
|
+
|
|
240
|
+
return "\n".join(lines)
|
|
241
|
+
|
|
242
|
+
def _convert_to_adf(self, text: str) -> Dict[str, Any]:
|
|
243
|
+
"""Convert plain text to Atlassian Document Format (ADF).
|
|
244
|
+
|
|
245
|
+
ADF is required for JIRA Cloud description fields.
|
|
246
|
+
This creates a simple document with paragraphs for each line.
|
|
247
|
+
"""
|
|
248
|
+
if not text:
|
|
249
|
+
return {
|
|
250
|
+
"type": "doc",
|
|
251
|
+
"version": 1,
|
|
252
|
+
"content": []
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
# Split text into lines and create paragraphs
|
|
256
|
+
lines = text.split('\n')
|
|
257
|
+
content = []
|
|
258
|
+
|
|
259
|
+
for line in lines:
|
|
260
|
+
if line.strip(): # Non-empty line
|
|
261
|
+
content.append({
|
|
262
|
+
"type": "paragraph",
|
|
263
|
+
"content": [
|
|
264
|
+
{
|
|
265
|
+
"type": "text",
|
|
266
|
+
"text": line
|
|
267
|
+
}
|
|
268
|
+
]
|
|
269
|
+
})
|
|
270
|
+
else: # Empty line becomes empty paragraph
|
|
271
|
+
content.append({
|
|
272
|
+
"type": "paragraph",
|
|
273
|
+
"content": []
|
|
274
|
+
})
|
|
275
|
+
|
|
276
|
+
return {
|
|
277
|
+
"type": "doc",
|
|
278
|
+
"version": 1,
|
|
279
|
+
"content": content
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
def _map_priority_to_jira(self, priority: Priority) -> str:
    """Map a universal priority onto a JIRA priority name.

    Returns the plain string name (e.g. ``"Highest"``) rather than the
    ``JiraPriority`` enum member, so the value is unambiguous when it is
    interpolated into JQL strings or serialized into JSON payloads,
    regardless of how the running Python version formats str-mixin enums.
    Unknown priorities fall back to "Medium".
    """
    mapping = {
        Priority.CRITICAL: JiraPriority.HIGHEST,
        Priority.HIGH: JiraPriority.HIGH,
        Priority.MEDIUM: JiraPriority.MEDIUM,
        Priority.LOW: JiraPriority.LOW,
    }
    return mapping.get(priority, JiraPriority.MEDIUM).value
|
|
291
|
+
|
|
292
|
+
def _map_priority_from_jira(self, jira_priority: Optional[Dict[str, Any]]) -> Priority:
    """Translate a JIRA priority object into the universal Priority enum.

    Matching is by substring on the lowercased priority name; anything
    unrecognized (or a missing priority) falls back to MEDIUM.
    """
    if not jira_priority:
        return Priority.MEDIUM

    name = jira_priority.get("name", "").lower()

    # Order matters: "highest" must be caught before the plain "high" check.
    if any(token in name for token in ("highest", "urgent", "critical")):
        return Priority.CRITICAL
    if "high" in name:
        return Priority.HIGH
    if "low" in name:
        return Priority.LOW
    return Priority.MEDIUM
|
|
307
|
+
|
|
308
|
+
def _map_state_from_jira(self, status: Dict[str, Any]) -> TicketState:
    """Translate a JIRA status object into the universal TicketState.

    The JIRA status category ("new"/"indeterminate"/"done") is tried
    first because custom workflows rename statuses freely; only when the
    category is absent or unrecognized is the status name matched by
    substring. Everything else defaults to OPEN.
    """
    if not status:
        return TicketState.OPEN

    category = status.get("statusCategory", {}).get("key", "").lower()
    by_category = {
        "new": TicketState.OPEN,
        "indeterminate": TicketState.IN_PROGRESS,
        "done": TicketState.DONE,
    }
    if category in by_category:
        return by_category[category]

    name = status.get("name", "").lower()
    # Checked in order; the first matching keyword wins.
    keyword_states = (
        (("block",), TicketState.BLOCKED),
        (("wait",), TicketState.WAITING),
        (("progress", "doing"), TicketState.IN_PROGRESS),
        (("review",), TicketState.READY),
        (("test",), TicketState.TESTED),
        (("done", "resolved"), TicketState.DONE),
        (("closed",), TicketState.CLOSED),
    )
    for keywords, state in keyword_states:
        if any(kw in name for kw in keywords):
            return state
    return TicketState.OPEN
|
|
341
|
+
|
|
342
|
+
def _issue_to_ticket(self, issue: Dict[str, Any]) -> Union[Epic, Task]:
    """Convert JIRA issue to universal ticket model.

    Issues whose type name contains "epic" become Epic objects (with
    their subtask keys as children); everything else becomes a Task.
    The raw JIRA identifiers and related objects are preserved under
    ``metadata["jira"]``.
    """
    fields = issue.get("fields", {})

    # Determine ticket type from the issue type's display name.
    issue_type = fields.get("issuetype", {}).get("name", "").lower()
    is_epic = "epic" in issue_type

    # Extract common fields.
    # Convert ADF description back to plain text if needed (Cloud
    # returns ADF documents, Server returns plain strings).
    description = self._convert_from_adf(fields.get("description", ""))

    base_data = {
        "id": issue.get("key"),
        "title": fields.get("summary", ""),
        "description": description,
        "state": self._map_state_from_jira(fields.get("status", {})),
        "priority": self._map_priority_from_jira(fields.get("priority")),
        "tags": [
            label.get("name", "") if isinstance(label, dict) else str(label)
            for label in fields.get("labels", [])
        ],
        # JIRA timestamps end in "Z"; rewrite to "+00:00" so
        # datetime.fromisoformat accepts them on older Pythons.
        "created_at": datetime.fromisoformat(
            fields.get("created", "").replace("Z", "+00:00")
        ) if fields.get("created") else None,
        "updated_at": datetime.fromisoformat(
            fields.get("updated", "").replace("Z", "+00:00")
        ) if fields.get("updated") else None,
        # Preserve raw JIRA payload pieces for round-tripping/debugging.
        "metadata": {
            "jira": {
                "id": issue.get("id"),
                "key": issue.get("key"),
                "self": issue.get("self"),
                "url": f"{self.server}/browse/{issue.get('key')}",
                "issue_type": fields.get("issuetype", {}),
                "project": fields.get("project", {}),
                "components": fields.get("components", []),
                "fix_versions": fields.get("fixVersions", []),
                "resolution": fields.get("resolution"),
            }
        }
    }

    if is_epic:
        # Create Epic; subtask keys become the epic's children.
        return Epic(
            **base_data,
            child_issues=[
                subtask.get("key")
                for subtask in fields.get("subtasks", [])
            ]
        )
    else:
        # Create Task
        parent = fields.get("parent", {})
        # NOTE(review): customfield_10014 is the conventional "Epic Link"
        # field id but it is instance-specific — confirm against
        # _get_custom_fields() on the target instance.
        epic_link = fields.get("customfield_10014")  # Common epic link field

        return Task(
            **base_data,
            parent_issue=parent.get("key") if parent else None,
            parent_epic=epic_link if epic_link else None,
            assignee=fields.get("assignee", {}).get("displayName")
            if fields.get("assignee") else None,
            # timetracking values are in seconds; convert to hours.
            estimated_hours=fields.get("timetracking", {}).get(
                "originalEstimateSeconds", 0
            ) / 3600 if fields.get("timetracking") else None,
            actual_hours=fields.get("timetracking", {}).get(
                "timeSpentSeconds", 0
            ) / 3600 if fields.get("timetracking") else None,
        )
|
|
412
|
+
|
|
413
|
+
def _ticket_to_issue_fields(
    self,
    ticket: Union[Epic, Task],
    issue_type: Optional[str] = None
) -> Dict[str, Any]:
    """Convert universal ticket to JIRA issue fields.

    Args:
        ticket: Ticket to serialize.
        issue_type: Explicit JIRA issue type name; when omitted, Epics
            map to "Epic" and everything else to "Task".

    Returns:
        A ``fields`` dict suitable for the issue create/update endpoints.
    """
    # Convert description to ADF format for JIRA Cloud; Server takes text.
    description = self._convert_to_adf(ticket.description or "") if self.is_cloud else (ticket.description or "")

    fields = {
        "summary": ticket.title,
        "description": description,
        "labels": ticket.tags,
        "priority": {"name": self._map_priority_to_jira(ticket.priority)},
    }

    # Only newly-created issues need the project reference.
    if not ticket.id and self.project_key:
        fields["project"] = {"key": self.project_key}

    # Issue type: explicit override wins, then Epic/Task by model class.
    if issue_type:
        fields["issuetype"] = {"name": issue_type}
    elif isinstance(ticket, Epic):
        fields["issuetype"] = {"name": JiraIssueType.EPIC}
    else:
        fields["issuetype"] = {"name": JiraIssueType.TASK}

    # Add task-specific fields
    if isinstance(ticket, Task):
        if ticket.assignee:
            # NOTE(review): JIRA Cloud expects an accountId here; if the
            # model stores a display name this will not resolve — confirm
            # what Task.assignee actually carries.
            fields["assignee"] = {"accountId": ticket.assignee}

        if ticket.parent_issue:
            fields["parent"] = {"key": ticket.parent_issue}

        # Time tracking: express the estimate in minutes so fractional
        # hours (e.g. 1.5h) are not silently truncated to whole hours.
        if ticket.estimated_hours:
            fields["timetracking"] = {
                "originalEstimate": f"{int(ticket.estimated_hours * 60)}m"
            }

    return fields
|
|
457
|
+
|
|
458
|
+
async def create(self, ticket: Union[Epic, Task]) -> Union[Epic, Task]:
    """Create a new JIRA issue and return the fully-populated ticket."""
    payload = {"fields": self._ticket_to_issue_fields(ticket)}
    created = await self._make_request("POST", "issue", data=payload)

    # The create endpoint returns only identifiers; re-fetch the issue
    # so the returned ticket carries every server-populated field.
    ticket.id = created.get("key")
    full_issue = await self._make_request("GET", f"issue/{ticket.id}")
    return self._issue_to_ticket(full_issue)
|
|
476
|
+
|
|
477
|
+
async def read(self, ticket_id: str) -> Optional[Union[Epic, Task]]:
    """Fetch a JIRA issue by key; returns None when it does not exist."""
    try:
        raw = await self._make_request(
            "GET",
            f"issue/{ticket_id}",
            params={"expand": "renderedFields"}
        )
    except HTTPStatusError as err:
        # A missing issue is reported as "not found", everything else
        # propagates to the caller.
        if err.response.status_code == 404:
            return None
        raise
    return self._issue_to_ticket(raw)
|
|
490
|
+
|
|
491
|
+
async def update(
    self,
    ticket_id: str,
    updates: Dict[str, Any]
) -> Optional[Union[Epic, Task]]:
    """Update a JIRA issue.

    Args:
        ticket_id: Issue key (e.g. "PROJ-123").
        updates: Partial updates; recognized keys are title, description,
            priority, tags, assignee, and state.

    Returns:
        The refreshed ticket, or None if the issue does not exist.
    """
    # Read current issue first so a missing issue yields None instead
    # of a failed PUT.
    current = await self.read(ticket_id)
    if not current:
        return None

    # Prepare update fields
    fields = {}

    if "title" in updates:
        fields["summary"] = updates["title"]
    if "description" in updates:
        # Mirror create(): JIRA Cloud requires descriptions in ADF,
        # while Server accepts plain text.
        fields["description"] = (
            self._convert_to_adf(updates["description"] or "")
            if self.is_cloud
            else updates["description"]
        )
    if "priority" in updates:
        fields["priority"] = {"name": self._map_priority_to_jira(updates["priority"])}
    if "tags" in updates:
        fields["labels"] = updates["tags"]
    if "assignee" in updates:
        fields["assignee"] = {"accountId": updates["assignee"]}

    # Apply field update only when there is something to send.
    if fields:
        await self._make_request(
            "PUT",
            f"issue/{ticket_id}",
            data={"fields": fields}
        )

    # Workflow state changes go through the transitions endpoint, not
    # the field-update endpoint.
    if "state" in updates:
        await self.transition_state(ticket_id, updates["state"])

    # Return updated issue
    return await self.read(ticket_id)
|
|
530
|
+
|
|
531
|
+
async def delete(self, ticket_id: str) -> bool:
    """Delete a JIRA issue; returns False when the issue is not found."""
    try:
        await self._make_request("DELETE", f"issue/{ticket_id}")
    except HTTPStatusError as err:
        if err.response.status_code == 404:
            return False
        raise
    return True
|
|
540
|
+
|
|
541
|
+
async def list(
    self,
    limit: int = 10,
    offset: int = 0,
    filters: Optional[Dict[str, Any]] = None
) -> List[Union[Epic, Task]]:
    """List JIRA issues with pagination.

    Supported filter keys: state, priority, assignee, ticket_type.
    """
    clauses = []

    if self.project_key:
        clauses.append(f"project = {self.project_key}")

    filters = filters or {}
    if "state" in filters:
        clauses.append(f'status = "{self.map_state_to_system(filters["state"])}"')
    if "priority" in filters:
        clauses.append(f'priority = "{self._map_priority_to_jira(filters["priority"])}"')
    if "assignee" in filters:
        clauses.append(f'assignee = "{filters["assignee"]}"')
    if "ticket_type" in filters:
        clauses.append(f'issuetype = "{filters["ticket_type"]}"')

    # With no clauses at all, fall back to a bare ordering query.
    jql = " AND ".join(clauses) if clauses else "ORDER BY created DESC"

    # "search/jql" is the current JIRA Cloud search endpoint.
    payload = {
        "jql": jql,
        "startAt": offset,
        "maxResults": limit,
        "fields": ["*all"],
        "expand": ["renderedFields"]
    }
    response = await self._make_request("POST", "search/jql", data=payload)

    return [self._issue_to_ticket(raw) for raw in response.get("issues", [])]
|
|
584
|
+
|
|
585
|
+
async def search(self, query: SearchQuery) -> List[Union[Epic, Task]]:
    """Search JIRA issues by translating a SearchQuery into JQL."""
    clauses = []

    if self.project_key:
        clauses.append(f"project = {self.project_key}")

    # Free-text search across summary/description/comments.
    if query.query:
        clauses.append(f'text ~ "{query.query}"')

    if query.state:
        clauses.append(f'status = "{self.map_state_to_system(query.state)}"')

    if query.priority:
        clauses.append(f'priority = "{self._map_priority_to_jira(query.priority)}"')

    if query.assignee:
        clauses.append(f'assignee = "{query.assignee}"')

    # Tags/labels: any label match qualifies.
    if query.tags:
        alternatives = " OR ".join(f'labels = "{tag}"' for tag in query.tags)
        clauses.append(f"({alternatives})")

    jql = " AND ".join(clauses) if clauses else "ORDER BY created DESC"

    # NOTE(review): user-supplied text is interpolated into JQL without
    # escaping embedded quotes — confirm inputs are trusted upstream.
    response = await self._make_request(
        "POST",
        "search/jql",  # current JIRA Cloud search endpoint
        data={
            "jql": jql,
            "startAt": query.offset,
            "maxResults": query.limit,
            "fields": ["*all"],
            "expand": ["renderedFields"]
        }
    )

    return [self._issue_to_ticket(raw) for raw in response.get("issues", [])]
|
|
634
|
+
|
|
635
|
+
async def transition_state(
    self,
    ticket_id: str,
    target_state: TicketState
) -> Optional[Union[Epic, Task]]:
    """Transition JIRA issue to a new state.

    Because workflows are customizable, the target transition is found
    heuristically: first by (sub)string match on the destination status
    name, then by status category. Returns the refreshed ticket, or
    None when no suitable transition exists.
    """
    # Get available transitions for this issue's current status.
    transitions = await self._get_transitions(ticket_id)

    # Find matching transition: substring containment is checked in
    # both directions so "done" matches "Mark as Done" and vice versa.
    target_name = self.map_state_to_system(target_state).lower()
    transition = None

    for trans in transitions:
        trans_name = trans.get("to", {}).get("name", "").lower()
        if target_name in trans_name or trans_name in target_name:
            transition = trans
            break

    if not transition:
        # Fallback: try to find by status category, which is stable
        # even when statuses are renamed.
        for trans in transitions:
            category = trans.get("to", {}).get("statusCategory", {}).get("key", "").lower()
            if (target_state == TicketState.DONE and category == "done") or \
               (target_state == TicketState.IN_PROGRESS and category == "indeterminate") or \
               (target_state == TicketState.OPEN and category == "new"):
                transition = trans
                break

    if not transition:
        # No workflow path to the requested state; report and give up.
        logger.warning(
            f"No transition found to move {ticket_id} to {target_state}. "
            f"Available transitions: {[t.get('name') for t in transitions]}"
        )
        return None

    # Execute transition
    await self._make_request(
        "POST",
        f"issue/{ticket_id}/transitions",
        data={"transition": {"id": transition["id"]}}
    )

    # Return updated issue
    return await self.read(ticket_id)
|
|
680
|
+
|
|
681
|
+
async def add_comment(self, comment: Comment) -> Comment:
    """Add a comment to a JIRA issue.

    Args:
        comment: Comment with ticket_id and content set.

    Returns:
        The same Comment, updated with the server-assigned id, creation
        timestamp, resolved author display name, and the raw JIRA
        payload under ``metadata["jira"]``.
    """
    # JIRA Cloud's v3 comment endpoint requires the body in ADF;
    # Server (REST v2) accepts plain text.
    body = self._convert_to_adf(comment.content) if self.is_cloud else comment.content
    data = {"body": body}

    # Add comment
    result = await self._make_request(
        "POST",
        f"issue/{comment.ticket_id}/comment",
        data=data
    )

    # Fold the server's response back into the comment object.
    comment.id = result.get("id")
    comment.created_at = datetime.fromisoformat(
        result.get("created", "").replace("Z", "+00:00")
    ) if result.get("created") else datetime.now()
    comment.author = result.get("author", {}).get("displayName", comment.author)
    comment.metadata["jira"] = result

    return comment
|
|
704
|
+
|
|
705
|
+
async def get_comments(
    self,
    ticket_id: str,
    limit: int = 10,
    offset: int = 0
) -> List[Comment]:
    """Get comments for a JIRA issue.

    Args:
        ticket_id: Issue key.
        limit: Maximum number of comments to return.
        offset: Number of comments to skip.

    Returns:
        Comment objects in the order JIRA returns them.
    """
    # Fetch the issue with only the comment field expanded.
    params = {
        "expand": "comments",
        "fields": "comment"
    }

    issue = await self._make_request(
        "GET",
        f"issue/{ticket_id}",
        params=params
    )

    # Extract comments
    comments_data = issue.get("fields", {}).get("comment", {}).get("comments", [])

    # Pagination is applied client-side over the returned list.
    paginated = comments_data[offset:offset + limit]

    # Convert to Comment objects
    comments = []
    for comment_data in paginated:
        comment = Comment(
            id=comment_data.get("id"),
            ticket_id=ticket_id,
            author=comment_data.get("author", {}).get("displayName", "Unknown"),
            # JIRA Cloud returns comment bodies as ADF documents;
            # flatten them to plain text (Server strings pass through).
            content=self._convert_from_adf(comment_data.get("body", "")),
            created_at=datetime.fromisoformat(
                comment_data.get("created", "").replace("Z", "+00:00")
            ) if comment_data.get("created") else None,
            metadata={"jira": comment_data}
        )
        comments.append(comment)

    return comments
|
|
746
|
+
|
|
747
|
+
async def get_project_info(self, project_key: Optional[str] = None) -> Dict[str, Any]:
    """Collect project metadata: the project object plus its issue
    types, the instance priorities, and custom field definitions.

    Raises:
        ValueError: If no project key is given or configured.
    """
    key = project_key or self.project_key
    if not key:
        raise ValueError("Project key is required")

    # Dict literals evaluate in order, so the fetch sequence matches
    # the original: project, issue types, priorities, custom fields.
    return {
        "project": await self._make_request("GET", f"project/{key}"),
        "issue_types": await self._get_issue_types(key),
        "priorities": await self._get_priorities(),
        "custom_fields": await self._get_custom_fields(),
    }
|
|
766
|
+
|
|
767
|
+
async def execute_jql(self, jql: str, limit: int = 50) -> List[Union[Epic, Task]]:
    """Execute a raw JQL query.

    Args:
        jql: JIRA Query Language string
        limit: Maximum number of results

    Returns:
        List of matching tickets
    """
    # Use the same "search/jql" endpoint as list()/search() for
    # consistency; the legacy "search" endpoint is deprecated on
    # JIRA Cloud.
    data = await self._make_request(
        "POST",
        "search/jql",
        data={
            "jql": jql,
            "startAt": 0,
            "maxResults": limit,
            "fields": ["*all"],
        }
    )

    issues = data.get("issues", [])
    return [self._issue_to_ticket(issue) for issue in issues]
|
|
790
|
+
|
|
791
|
+
async def get_sprints(self, board_id: Optional[int] = None) -> List[Dict[str, Any]]:
    """Get active sprints for a board (requires JIRA Software).

    Args:
        board_id: Agile board ID; when omitted, the first board found
            for the configured project is used.

    Returns:
        List of sprint information (active and future sprints), or an
        empty list when no board can be found.
    """
    # NOTE(review): these endpoints start with "/rest/agile/..." but
    # _make_request joins endpoints onto the "/rest/api/{2,3}" base,
    # which would produce ".../rest/api/3/rest/agile/..." — verify the
    # resulting URL against a live instance.
    if not board_id:
        # Try to find a board for the project
        boards_data = await self._make_request(
            "GET",
            f"/rest/agile/1.0/board",
            params={"projectKeyOrId": self.project_key}
        )
        boards = boards_data.get("values", [])
        if not boards:
            return []
        board_id = boards[0]["id"]

    # Get sprints for the board
    sprints_data = await self._make_request(
        "GET",
        f"/rest/agile/1.0/board/{board_id}/sprint",
        params={"state": "active,future"}
    )

    return sprints_data.get("values", [])
|
|
820
|
+
|
|
821
|
+
async def close(self) -> None:
    """Close the adapter and cleanup resources.

    No persistent HTTP client is held (one is created per request in
    _get_client), so cleanup only drops the metadata caches.
    """
    for cache in (
        self._workflow_cache,
        self._priority_cache,
        self._issue_types_cache,
        self._custom_fields_cache,
    ):
        cache.clear()
|
|
828
|
+
|
|
829
|
+
|
|
830
|
+
# Register the adapter under the "jira" key so it can be constructed
# by name from configuration at import time.
AdapterRegistry.register("jira", JiraAdapter)
|