mcp-ticketer 2.0.1__py3-none-any.whl → 2.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/aitrackdown.py +122 -0
- mcp_ticketer/adapters/asana/adapter.py +121 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/{github.py → github/adapter.py} +1506 -365
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/{jira.py → jira/adapter.py} +250 -678
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/adapter.py +1000 -92
- mcp_ticketer/adapters/linear/client.py +91 -1
- mcp_ticketer/adapters/linear/mappers.py +107 -0
- mcp_ticketer/adapters/linear/queries.py +112 -2
- mcp_ticketer/adapters/linear/types.py +50 -10
- mcp_ticketer/cli/configure.py +524 -89
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/main.py +10 -0
- mcp_ticketer/cli/mcp_configure.py +177 -49
- mcp_ticketer/cli/platform_installer.py +9 -0
- mcp_ticketer/cli/setup_command.py +157 -1
- mcp_ticketer/cli/ticket_commands.py +443 -81
- mcp_ticketer/cli/utils.py +113 -0
- mcp_ticketer/core/__init__.py +28 -0
- mcp_ticketer/core/adapter.py +367 -1
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +345 -0
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/session_state.py +6 -1
- mcp_ticketer/core/state_matcher.py +36 -3
- mcp_ticketer/mcp/server/__main__.py +2 -1
- mcp_ticketer/mcp/server/routing.py +68 -0
- mcp_ticketer/mcp/server/tools/__init__.py +7 -4
- mcp_ticketer/mcp/server/tools/attachment_tools.py +3 -1
- mcp_ticketer/mcp/server/tools/config_tools.py +233 -35
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +30 -1
- mcp_ticketer/mcp/server/tools/ticket_tools.py +37 -1
- mcp_ticketer/queue/queue.py +68 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/METADATA +33 -3
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/RECORD +72 -36
- mcp_ticketer-2.2.13.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer-2.0.1.dist-info/top_level.txt +0 -1
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/licenses/LICENSE +0 -0
mcp_ticketer/adapters/{jira.py → jira/adapter.py}
@@ -2,20 +2,17 @@
 
 from __future__ import annotations
 
-import asyncio
 import builtins
 import logging
-import re
 from datetime import datetime
-from
+from pathlib import Path
 from typing import Any, Union
 
-import
-from httpx import AsyncClient, HTTPStatusError, TimeoutException
+from httpx import HTTPStatusError
 
-from
-from
-from
+from ...core.adapter import BaseAdapter
+from ...core.env_loader import load_adapter_config, validate_adapter_config
+from ...core.models import (
     Attachment,
     Comment,
     Epic,
@@ -24,102 +21,31 @@ from ..core.models import (
     Task,
     TicketState,
 )
-from
+from ...core.registry import AdapterRegistry
+from .client import JiraClient
+from .mappers import (
+    issue_to_ticket,
+    map_epic_update_fields,
+    map_update_fields,
+    ticket_to_issue_fields,
+)
+from .queries import (
+    build_epic_list_jql,
+    build_list_jql,
+    build_project_labels_jql,
+    build_search_jql,
+    get_labels_search_params,
+    get_search_params,
+)
+from .types import (
+    extract_text_from_adf,
+    get_state_mapping,
+    parse_jira_datetime,
+)
 
 logger = logging.getLogger(__name__)
 
 
-def parse_jira_datetime(date_str: str) -> datetime | None:
-    """Parse JIRA datetime strings which can be in various formats.
-
-    JIRA can return dates in formats like:
-    - 2025-10-24T14:12:18.771-0400
-    - 2025-10-24T14:12:18.771Z
-    - 2025-10-24T14:12:18.771+00:00
-    """
-    if not date_str:
-        return None
-
-    try:
-        # Handle Z timezone
-        if date_str.endswith("Z"):
-            return datetime.fromisoformat(date_str.replace("Z", "+00:00"))
-
-        # Handle timezone formats like -0400, +0500 (need to add colon)
-        if re.match(r".*[+-]\d{4}$", date_str):
-            # Insert colon in timezone: -0400 -> -04:00
-            date_str = re.sub(r"([+-]\d{2})(\d{2})$", r"\1:\2", date_str)
-
-        return datetime.fromisoformat(date_str)
-
-    except (ValueError, TypeError) as e:
-        logger.warning(f"Failed to parse JIRA datetime '{date_str}': {e}")
-        return None
-
-
-def extract_text_from_adf(adf_content: str | dict[str, Any]) -> str:
-    """Extract plain text from Atlassian Document Format (ADF).
-
-    Args:
-    ----
-        adf_content: Either a string (already plain text) or ADF document dict
-
-    Returns:
-    -------
-        Plain text string extracted from the ADF content
-
-    """
-    if isinstance(adf_content, str):
-        return adf_content
-
-    if not isinstance(adf_content, dict):
-        return str(adf_content) if adf_content else ""
-
-    def extract_text_recursive(node: dict[str, Any]) -> str:
-        """Recursively extract text from ADF nodes."""
-        if not isinstance(node, dict):
-            return ""
-
-        # If this is a text node, return its text
-        if node.get("type") == "text":
-            return node.get("text", "")
-
-        # If this node has content, process it recursively
-        content = node.get("content", [])
-        if isinstance(content, list):
-            return "".join(extract_text_recursive(child) for child in content)
-
-        return ""
-
-    try:
-        return extract_text_recursive(adf_content)
-    except Exception as e:
-        logger.warning(f"Failed to extract text from ADF: {e}")
-        return str(adf_content) if adf_content else ""
-
-
-class JiraIssueType(str, Enum):
-    """Common JIRA issue types."""
-
-    EPIC = "Epic"
-    STORY = "Story"
-    TASK = "Task"
-    BUG = "Bug"
-    SUBTASK = "Sub-task"
-    IMPROVEMENT = "Improvement"
-    NEW_FEATURE = "New Feature"
-
-
-class JiraPriority(str, Enum):
-    """Standard JIRA priority levels."""
-
-    HIGHEST = "Highest"
-    HIGH = "High"
-    MEDIUM = "Medium"
-    LOW = "Low"
-    LOWEST = "Lowest"
-
-
 class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
     """Adapter for JIRA using REST API v3."""
 
@@ -152,7 +78,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         )
 
         # Configuration
-        self.server = full_config.get("server", "")
+        self.server = full_config.get("server", "").rstrip("/")
         self.email = full_config.get("email", "")
         self.api_token = full_config.get("api_token", "")
         self.project_key = full_config.get("project_key", "")
@@ -161,23 +87,17 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         self.timeout = full_config.get("timeout", 30)
         self.max_retries = full_config.get("max_retries", 3)
 
-        #
-        self.
-
-
-
-
-
-
+        # Initialize HTTP client
+        self.client = JiraClient(
+            server=self.server,
+            email=self.email,
+            api_token=self.api_token,
+            is_cloud=self.is_cloud,
+            verify_ssl=self.verify_ssl,
+            timeout=self.timeout,
+            max_retries=self.max_retries,
         )
 
-        # HTTP client setup
-        self.auth = httpx.BasicAuth(self.email, self.api_token)
-        self.headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
         # Cache for workflow states and transitions
         self._workflow_cache: dict[str, Any] = {}
         self._priority_cache: list[dict[str, Any]] = []
@@ -211,96 +131,12 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
     def _get_state_mapping(self) -> dict[TicketState, str]:
         """Map universal states to common JIRA workflow states."""
-        return
-            TicketState.OPEN: "To Do",
-            TicketState.IN_PROGRESS: "In Progress",
-            TicketState.READY: "In Review",
-            TicketState.TESTED: "Testing",
-            TicketState.DONE: "Done",
-            TicketState.WAITING: "Waiting",
-            TicketState.BLOCKED: "Blocked",
-            TicketState.CLOSED: "Closed",
-        }
-
-    async def _get_client(self) -> AsyncClient:
-        """Get configured async HTTP client."""
-        return AsyncClient(
-            auth=self.auth,
-            headers=self.headers,
-            timeout=self.timeout,
-            verify=self.verify_ssl,
-        )
-
-    async def _make_request(
-        self,
-        method: str,
-        endpoint: str,
-        data: dict[str, Any] | None = None,
-        params: dict[str, Any] | None = None,
-        retry_count: int = 0,
-    ) -> dict[str, Any]:
-        """Make HTTP request to JIRA API with retry logic.
-
-        Args:
-        ----
-            method: HTTP method
-            endpoint: API endpoint
-            data: Request body data
-            params: Query parameters
-            retry_count: Current retry attempt
-
-        Returns:
-        -------
-            Response data
-
-        Raises:
-        ------
-            HTTPStatusError: On API errors
-            TimeoutException: On timeout
-
-        """
-        url = f"{self.api_base}/{endpoint.lstrip('/')}"
-
-        async with await self._get_client() as client:
-            try:
-                response = await client.request(
-                    method=method, url=url, json=data, params=params
-                )
-                response.raise_for_status()
-
-                # Handle empty responses
-                if response.status_code == 204:
-                    return {}
-
-                return response.json()
-
-            except TimeoutException as e:
-                if retry_count < self.max_retries:
-                    await asyncio.sleep(2**retry_count)  # Exponential backoff
-                    return await self._make_request(
-                        method, endpoint, data, params, retry_count + 1
-                    )
-                raise e
-
-            except HTTPStatusError as e:
-                # Handle rate limiting
-                if e.response.status_code == 429 and retry_count < self.max_retries:
-                    retry_after = int(e.response.headers.get("Retry-After", 5))
-                    await asyncio.sleep(retry_after)
-                    return await self._make_request(
-                        method, endpoint, data, params, retry_count + 1
-                    )
-
-                # Log error details
-                logger.error(
-                    f"JIRA API error: {e.response.status_code} - {e.response.text}"
-                )
-                raise e
+        return get_state_mapping()
 
     async def _get_priorities(self) -> list[dict[str, Any]]:
         """Get available priorities from JIRA."""
         if not self._priority_cache:
-            self._priority_cache = await self.
+            self._priority_cache = await self.client.get("priority")
         return self._priority_cache
 
     async def _get_issue_types(
@@ -309,19 +145,19 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         """Get available issue types for a project."""
         key = project_key or self.project_key
         if key not in self._issue_types_cache:
-            data = await self.
+            data = await self.client.get(f"project/{key}")
             self._issue_types_cache[key] = data.get("issueTypes", [])
         return self._issue_types_cache[key]
 
     async def _get_transitions(self, issue_key: str) -> list[dict[str, Any]]:
         """Get available transitions for an issue."""
-        data = await self.
+        data = await self.client.get(f"issue/{issue_key}/transitions")
         return data.get("transitions", [])
 
    async def _get_custom_fields(self) -> dict[str, str]:
         """Get custom field definitions."""
         if not self._custom_fields_cache:
-            fields = await self.
+            fields = await self.client.get("field")
             self._custom_fields_cache = {
                 field["name"]: field["id"]
                 for field in fields
@@ -329,248 +165,6 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             }
         return self._custom_fields_cache
 
-    def _convert_from_adf(self, adf_content: Any) -> str:
-        """Convert Atlassian Document Format (ADF) to plain text.
-
-        This extracts text content from ADF structure for display.
-        """
-        if not adf_content:
-            return ""
-
-        # If it's already a string, return it (JIRA Server)
-        if isinstance(adf_content, str):
-            return adf_content
-
-        # Handle ADF structure
-        if not isinstance(adf_content, dict):
-            return str(adf_content)
-
-        content_nodes = adf_content.get("content", [])
-        lines = []
-
-        for node in content_nodes:
-            if node.get("type") == "paragraph":
-                paragraph_text = ""
-                for content_item in node.get("content", []):
-                    if content_item.get("type") == "text":
-                        paragraph_text += content_item.get("text", "")
-                lines.append(paragraph_text)
-            elif node.get("type") == "heading":
-                heading_text = ""
-                for content_item in node.get("content", []):
-                    if content_item.get("type") == "text":
-                        heading_text += content_item.get("text", "")
-                lines.append(heading_text)
-
-        return "\n".join(lines)
-
-    def _convert_to_adf(self, text: str) -> dict[str, Any]:
-        """Convert plain text to Atlassian Document Format (ADF).
-
-        ADF is required for JIRA Cloud description fields.
-        This creates a simple document with paragraphs for each line.
-        """
-        if not text:
-            return {"type": "doc", "version": 1, "content": []}
-
-        # Split text into lines and create paragraphs
-        lines = text.split("\n")
-        content = []
-
-        for line in lines:
-            if line.strip():  # Non-empty line
-                content.append(
-                    {"type": "paragraph", "content": [{"type": "text", "text": line}]}
-                )
-            else:  # Empty line becomes empty paragraph
-                content.append({"type": "paragraph", "content": []})
-
-        return {"type": "doc", "version": 1, "content": content}
-
-    def _map_priority_to_jira(self, priority: Priority) -> str:
-        """Map universal priority to JIRA priority."""
-        mapping = {
-            Priority.CRITICAL: JiraPriority.HIGHEST,
-            Priority.HIGH: JiraPriority.HIGH,
-            Priority.MEDIUM: JiraPriority.MEDIUM,
-            Priority.LOW: JiraPriority.LOW,
-        }
-        return mapping.get(priority, JiraPriority.MEDIUM)
-
-    def _map_priority_from_jira(self, jira_priority: dict[str, Any] | None) -> Priority:
-        """Map JIRA priority to universal priority."""
-        if not jira_priority:
-            return Priority.MEDIUM
-
-        name = jira_priority.get("name", "").lower()
-
-        if "highest" in name or "urgent" in name or "critical" in name:
-            return Priority.CRITICAL
-        elif "high" in name:
-            return Priority.HIGH
-        elif "low" in name:
-            return Priority.LOW
-        else:
-            return Priority.MEDIUM
-
-    def _map_state_from_jira(self, status: dict[str, Any]) -> TicketState:
-        """Map JIRA status to universal state."""
-        if not status:
-            return TicketState.OPEN
-
-        name = status.get("name", "").lower()
-        category = status.get("statusCategory", {}).get("key", "").lower()
-
-        # Try to match by category first (more reliable)
-        if category == "new":
-            return TicketState.OPEN
-        elif category == "indeterminate":
-            return TicketState.IN_PROGRESS
-        elif category == "done":
-            return TicketState.DONE
-
-        # Fall back to name matching
-        if "block" in name:
-            return TicketState.BLOCKED
-        elif "wait" in name:
-            return TicketState.WAITING
-        elif "progress" in name or "doing" in name:
-            return TicketState.IN_PROGRESS
-        elif "review" in name:
-            return TicketState.READY
-        elif "test" in name:
-            return TicketState.TESTED
-        elif "done" in name or "resolved" in name:
-            return TicketState.DONE
-        elif "closed" in name:
-            return TicketState.CLOSED
-        else:
-            return TicketState.OPEN
-
-    def _issue_to_ticket(self, issue: dict[str, Any]) -> Epic | Task:
-        """Convert JIRA issue to universal ticket model."""
-        fields = issue.get("fields", {})
-
-        # Determine ticket type
-        issue_type = fields.get("issuetype", {}).get("name", "").lower()
-        is_epic = "epic" in issue_type
-
-        # Extract common fields
-        # Convert ADF description back to plain text if needed
-        description = self._convert_from_adf(fields.get("description", ""))
-
-        base_data = {
-            "id": issue.get("key"),
-            "title": fields.get("summary", ""),
-            "description": description,
-            "state": self._map_state_from_jira(fields.get("status", {})),
-            "priority": self._map_priority_from_jira(fields.get("priority")),
-            "tags": [
-                label.get("name", "") if isinstance(label, dict) else str(label)
-                for label in fields.get("labels", [])
-            ],
-            "created_at": parse_jira_datetime(fields.get("created")),
-            "updated_at": parse_jira_datetime(fields.get("updated")),
-            "metadata": {
-                "jira": {
-                    "id": issue.get("id"),
-                    "key": issue.get("key"),
-                    "self": issue.get("self"),
-                    "url": f"{self.server}/browse/{issue.get('key')}",
-                    "issue_type": fields.get("issuetype", {}),
-                    "project": fields.get("project", {}),
-                    "components": fields.get("components", []),
-                    "fix_versions": fields.get("fixVersions", []),
-                    "resolution": fields.get("resolution"),
-                }
-            },
-        }
-
-        if is_epic:
-            # Create Epic
-            return Epic(
-                **base_data,
-                child_issues=[
-                    subtask.get("key") for subtask in fields.get("subtasks", [])
-                ],
-            )
-        else:
-            # Create Task
-            parent = fields.get("parent", {})
-            epic_link = fields.get("customfield_10014")  # Common epic link field
-
-            return Task(
-                **base_data,
-                parent_issue=parent.get("key") if parent else None,
-                parent_epic=epic_link if epic_link else None,
-                assignee=(
-                    fields.get("assignee", {}).get("displayName")
-                    if fields.get("assignee")
-                    else None
-                ),
-                estimated_hours=(
-                    fields.get("timetracking", {}).get("originalEstimateSeconds", 0)
-                    / 3600
-                    if fields.get("timetracking")
-                    else None
-                ),
-                actual_hours=(
-                    fields.get("timetracking", {}).get("timeSpentSeconds", 0) / 3600
-                    if fields.get("timetracking")
-                    else None
-                ),
-            )
-
-    def _ticket_to_issue_fields(
-        self, ticket: Epic | Task, issue_type: str | None = None
-    ) -> dict[str, Any]:
-        """Convert universal ticket to JIRA issue fields."""
-        # Convert description to ADF format for JIRA Cloud
-        description = (
-            self._convert_to_adf(ticket.description or "")
-            if self.is_cloud
-            else (ticket.description or "")
-        )
-
-        fields = {
-            "summary": ticket.title,
-            "description": description,
-            "labels": ticket.tags,
-        }
-
-        # Only add priority for Tasks, not Epics (some JIRA configurations don't allow priority on Epics)
-        if isinstance(ticket, Task):
-            fields["priority"] = {"name": self._map_priority_to_jira(ticket.priority)}
-
-        # Add project if creating new issue
-        if not ticket.id and self.project_key:
-            fields["project"] = {"key": self.project_key}
-
-        # Set issue type
-        if issue_type:
-            fields["issuetype"] = {"name": issue_type}
-        elif isinstance(ticket, Epic):
-            fields["issuetype"] = {"name": JiraIssueType.EPIC}
-        else:
-            fields["issuetype"] = {"name": JiraIssueType.TASK}
-
-        # Add task-specific fields
-        if isinstance(ticket, Task):
-            if ticket.assignee:
-                # Note: Need to resolve user account ID
-                fields["assignee"] = {"accountId": ticket.assignee}
-
-            if ticket.parent_issue:
-                fields["parent"] = {"key": ticket.parent_issue}
-
-            # Time tracking
-            if ticket.estimated_hours:
-                fields["timetracking"] = {
-                    "originalEstimate": f"{int(ticket.estimated_hours)}h"
-                }
-
-        return fields
-
     async def create(self, ticket: Epic | Task) -> Epic | Task:
         """Create a new JIRA issue."""
         # Validate credentials before attempting operation
@@ -579,17 +173,21 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         # Prepare issue fields
-        fields =
+        fields = ticket_to_issue_fields(
+            ticket,
+            is_cloud=self.is_cloud,
+            project_key=self.project_key,
+        )
 
         # Create issue
-        data = await self.
+        data = await self.client.post("issue", data={"fields": fields})
 
         # Set the ID and fetch full issue data
         ticket.id = data.get("key")
 
         # Fetch complete issue data
-        created_issue = await self.
-        return self.
+        created_issue = await self.client.get(f"issue/{ticket.id}")
+        return issue_to_ticket(created_issue, self.server)
 
     async def read(self, ticket_id: str) -> Epic | Task | None:
         """Read a JIRA issue by key."""
@@ -599,10 +197,10 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         try:
-            issue = await self.
-
+            issue = await self.client.get(
+                f"issue/{ticket_id}", params={"expand": "renderedFields"}
             )
-            return self.
+            return issue_to_ticket(issue, self.server)
         except HTTPStatusError as e:
             if e.response.status_code == 404:
                 return None
@@ -623,26 +221,11 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             return None
 
         # Prepare update fields
-        fields =
-
-        if "title" in updates:
-            fields["summary"] = updates["title"]
-        if "description" in updates:
-            fields["description"] = updates["description"]
-        if "priority" in updates:
-            fields["priority"] = {
-                "name": self._map_priority_to_jira(updates["priority"])
-            }
-        if "tags" in updates:
-            fields["labels"] = updates["tags"]
-        if "assignee" in updates:
-            fields["assignee"] = {"accountId": updates["assignee"]}
+        fields = map_update_fields(updates, is_cloud=self.is_cloud)
 
         # Apply update
         if fields:
-            await self.
-                "PUT", f"issue/{ticket_id}", data={"fields": fields}
-            )
+            await self.client.put(f"issue/{ticket_id}", data={"fields": fields})
 
         # Handle state transitions separately
         if "state" in updates:
@@ -659,7 +242,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         try:
-            await self.
+            await self.client.delete(f"issue/{ticket_id}")
             return True
         except HTTPStatusError as e:
            if e.response.status_code == 404:
@@ -671,91 +254,40 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
     ) -> list[Epic | Task]:
         """List JIRA issues with pagination."""
         # Build JQL query
-
-
-
-
-
-        if filters:
-            if "state" in filters:
-                status = self.map_state_to_system(filters["state"])
-                jql_parts.append(f'status = "{status}"')
-            if "priority" in filters:
-                priority = self._map_priority_to_jira(filters["priority"])
-                jql_parts.append(f'priority = "{priority}"')
-            if "assignee" in filters:
-                jql_parts.append(f'assignee = "{filters["assignee"]}"')
-            if "ticket_type" in filters:
-                jql_parts.append(f'issuetype = "{filters["ticket_type"]}"')
-
-        jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
+        jql = build_list_jql(
+            self.project_key,
+            filters=filters,
+            state_mapper=self.map_state_to_system,
+        )
 
         # Search issues using the JIRA API endpoint
-
-
-            "search/jql",  # JIRA search endpoint (new API v3)
-            params={
-                "jql": jql,
-                "startAt": offset,
-                "maxResults": limit,
-                "fields": "*all",
-                "expand": "renderedFields",
-            },
-        )
+        params = get_search_params(jql, start_at=offset, max_results=limit)
+        data = await self.client.get("search/jql", params=params)
 
         # Convert issues
         issues = data.get("issues", [])
-        return [self.
+        return [issue_to_ticket(issue, self.server) for issue in issues]
 
     async def search(self, query: SearchQuery) -> builtins.list[Epic | Task]:
         """Search JIRA issues using JQL."""
         # Build JQL query
-
-
-
-
-
-        # Text search
-        if query.query:
-            jql_parts.append(f'text ~ "{query.query}"')
-
-        # State filter
-        if query.state:
-            status = self.map_state_to_system(query.state)
-            jql_parts.append(f'status = "{status}"')
-
-        # Priority filter
-        if query.priority:
-            priority = self._map_priority_to_jira(query.priority)
-            jql_parts.append(f'priority = "{priority}"')
-
-        # Assignee filter
-        if query.assignee:
-            jql_parts.append(f'assignee = "{query.assignee}"')
-
-        # Tags/labels filter
-        if query.tags:
-            label_conditions = [f'labels = "{tag}"' for tag in query.tags]
-            jql_parts.append(f"({' OR '.join(label_conditions)})")
-
-        jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
+        jql = build_search_jql(
+            self.project_key,
+            query,
+            state_mapper=self.map_state_to_system,
+        )
 
         # Execute search using the JIRA API endpoint
-
-
-
-
-                "jql": jql,
-                "startAt": query.offset,
-                "maxResults": query.limit,
-                "fields": "*all",
-                "expand": "renderedFields",
-            },
+        params = get_search_params(
+            jql,
+            start_at=query.offset,
+            max_results=query.limit,
         )
+        data = await self.client.get("search/jql", params=params)
 
         # Convert and return results
         issues = data.get("issues", [])
-        return [self.
+        return [issue_to_ticket(issue, self.server) for issue in issues]
 
     async def transition_state(
         self, ticket_id: str, target_state: TicketState
@@ -799,8 +331,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             return None
 
         # Execute transition
-        await self.
-            "POST",
+        await self.client.post(
             f"issue/{ticket_id}/transitions",
             data={"transition": {"id": transition["id"]}},
         )
@@ -825,9 +356,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         }
 
         # Add comment
-        result = await self.
-            "POST", f"issue/{comment.ticket_id}/comment", data=data
-        )
+        result = await self.client.post(f"issue/{comment.ticket_id}/comment", data=data)
 
         # Update comment with JIRA data
         comment.id = result.get("id")
@@ -846,7 +375,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         # Fetch issue with comments
         params = {"expand": "comments", "fields": "comment"}
 
-        issue = await self.
+        issue = await self.client.get(f"issue/{ticket_id}", params=params)
 
         # Extract comments
         comments_data = issue.get("fields", {}).get("comment", {}).get("comments", [])
@@ -879,7 +408,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         if not key:
             raise ValueError("Project key is required")
 
-        project = await self.
+        project = await self.client.get(f"project/{key}")
 
         # Get additional project details
         issue_types = await self._get_issue_types(key)
@@ -908,8 +437,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
            List of matching tickets
 
        """
-        data = await self.
-            "POST",
+        data = await self.client.post(
             "search",
             data={
                 "jql": jql,
@@ -920,7 +448,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         )
 
         issues = data.get("issues", [])
-        return [self.
+        return [issue_to_ticket(issue, self.server) for issue in issues]
 
     async def get_sprints(
         self, board_id: int | None = None
@@ -938,8 +466,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         """
         if not board_id:
             # Try to find a board for the project
-            boards_data = await self.
-                "GET",
+            boards_data = await self.client.get(
                 "/rest/agile/1.0/board",
                 params={"projectKeyOrId": self.project_key},
             )
@@ -949,8 +476,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
            board_id = boards[0]["id"]
 
        # Get sprints for the board
-        sprints_data = await self.
-            "GET",
+        sprints_data = await self.client.get(
            f"/rest/agile/1.0/board/{board_id}/sprint",
            params={"state": "active,future"},
        )
@@ -964,9 +490,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         try:
             # Get project role users
-            project_data = await self.
-                "GET", f"project/{self.project_key}"
-            )
+            project_data = await self.client.get(f"project/{self.project_key}")
 
             # Get users from project roles
             users = []
@@ -975,8 +499,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
                 # Extract role ID from URL
                 role_id = role_url.split("/")[-1]
                 try:
-                    role_data = await self.
-
+                    role_data = await self.client.get(
+                        f"project/{self.project_key}/role/{role_id}"
                     )
                     if "actors" in role_data:
                         for actor in role_data["actors"]:
@@ -1000,8 +524,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         except Exception:
             # Fallback: try to get assignable users for the project
             try:
-                users_data = await self.
-                    "GET",
+                users_data = await self.client.get(
                     "user/assignable/search",
                     params={"project": self.project_key, "maxResults": 50},
                 )
@@ -1012,7 +535,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
     async def get_current_user(self) -> dict[str, Any] | None:
         """Get current authenticated user information."""
         try:
-            return await self.
+            return await self.client.get("myself")
         except Exception:
             return None
 
@@ -1030,15 +553,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         try:
             # Query recent issues to get labels in use
             jql = f"project = {self.project_key} ORDER BY updated DESC"
-
-
-                "search/jql",
-                params={
-                    "jql": jql,
-                    "maxResults": 100,  # Sample from recent 100 issues
-                    "fields": "labels",
-                },
-            )
+            params = get_labels_search_params(jql, max_results=100)
+            data = await self.client.get("search/jql", params=params)
 
             # Collect unique labels
             unique_labels = set()
@@ -1138,16 +654,9 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         try:
             # Query recent issues to get labels in use
-            jql =
-
-
-                "search/jql",
-                params={
-                    "jql": jql,
-                    "maxResults": 500,  # Sample from more issues for better coverage
-                    "fields": "labels",
-                },
-            )
+            jql = build_project_labels_jql(key, max_results=500)
+            params = get_labels_search_params(jql, max_results=500)
+            data = await self.client.get("search/jql", params=params)
 
             # Collect labels with usage count
             label_counts: dict[str, int] = {}
@@ -1211,8 +720,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         try:
             # If no board_id provided, try to find a board for the project
             if not board_id:
-                boards_data = await self.
-                    "GET",
+                boards_data = await self.client.get(
                     "/rest/agile/1.0/board",
                     params={"projectKeyOrId": self.project_key, "maxResults": 1},
                 )
@@ -1229,8 +737,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             if state:
                 params["state"] = state
 
-            sprints_data = await self.
-
+            sprints_data = await self.client.get(
+                f"/rest/agile/1.0/board/{board_id}/sprint", params=params
             )
 
             sprints = sprints_data.get("values", [])
@@ -1292,9 +800,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         # Use project-specific statuses if project key provided
         if project_key:
             # Get statuses for the project
-            data = await self.
-                "GET", f"project/{project_key}/statuses"
-            )
+            data = await self.client.get(f"project/{project_key}/statuses")
 
             # Extract unique statuses from all issue types
             status_map: dict[str, dict[str, Any]] = {}
@@ -1307,7 +813,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             statuses = list(status_map.values())
         else:
             # Get all statuses
-            statuses = await self.
+            statuses = await self.client.get("status")
 
         # Transform to standardized format
         return [
@@ -1358,8 +864,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         try:
             # Get issue with status field
-            issue = await self.
-
+            issue = await self.client.get(
+                f"issue/{issue_key}", params={"fields": "status"}
             )
 
             if not issue:
@@ -1368,9 +874,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             status = issue.get("fields", {}).get("status", {})
 
             # Get available transitions
-            transitions_data = await self.
-                "GET", f"issue/{issue_key}/transitions"
-            )
+            transitions_data = await self.client.get(f"issue/{issue_key}/transitions")
             transitions = transitions_data.get("transitions", [])
 
             # Transform transitions to simplified format
@@ -1517,34 +1021,19 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         # Build JQL query for epics
-
-
-        # Add state filter if provided
-        if state:
-            jql_parts.append(f'status = "{state}"')
-
-        jql = " AND ".join(jql_parts) + " ORDER BY updated DESC"
+        jql = build_epic_list_jql(self.project_key, state=state)
 
         try:
             # Execute search
-
-
-                "search/jql",
-                params={
-                    "jql": jql,
-                    "startAt": offset,
-                    "maxResults": limit,
-                    "fields": "*all",
-                    "expand": "renderedFields",
-                },
-            )
+            params = get_search_params(jql, start_at=offset, max_results=limit)
+            data = await self.client.get("search/jql", params=params)
 
             # Convert issues to tickets
             issues = data.get("issues", [])
             epics = []
 
             for issue in issues:
-                ticket = self.
+                ticket = issue_to_ticket(issue, self.server)
                 # Only include if it's actually an Epic
                 if isinstance(ticket, Epic):
                     epics.append(ticket)
@@ -1577,37 +1066,14 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
            HTTPStatusError: If update fails
 
        """
-        fields =
-
-        # Map title to summary
-        if "title" in updates:
-            fields["summary"] = updates["title"]
-
-        # Convert description to ADF format
-        if "description" in updates:
-            fields["description"] = self._convert_to_adf(updates["description"])
-
-        # Map tags to labels
-        if "tags" in updates:
-            fields["labels"] = updates["tags"]
-
-        # Map priority (some JIRA configs allow priority on Epics)
-        if "priority" in updates:
-            priority_value = updates["priority"]
-            if isinstance(priority_value, Priority):
-                fields["priority"] = {
-                    "name": self._map_priority_to_jira(priority_value)
-                }
-            else:
-                # String priority passed directly
-                fields["priority"] = {"name": priority_value}
+        fields = map_epic_update_fields(updates)
 
         if not fields and "state" not in updates:
             raise ValueError("At least one field must be updated")
 
         # Apply field updates if any
         if fields:
-            await self.
+            await self.client.put(f"issue/{epic_id}", data={"fields": fields})
 
         # Handle state transitions separately (JIRA uses workflow transitions)
         if "state" in updates:
@@ -1638,8 +1104,6 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
            HTTPStatusError: If upload fails
 
        """
-        from pathlib import Path
-
         # Validate credentials before attempting operation
         is_valid, error_message = self.validate_credentials()
         if not is_valid:
@@ -1649,40 +1113,28 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         if not file_path_obj.exists():
             raise FileNotFoundError(f"File not found: {file_path}")
 
-        #
-
-        "
-
-
-
-        # Prepare multipart file upload
-        with open(file_path_obj, "rb") as f:
-            files = {"file": (file_path_obj.name, f, "application/octet-stream")}
-
-            url = f"{self.api_base}/issue/{ticket_id}/attachments"
-
-            # Use existing client infrastructure
-            async with await self._get_client() as client:
-                response = await client.post(
-                    url, files=files, headers={**self.headers, **headers}
-                )
-                response.raise_for_status()
-
-                # JIRA returns array with single attachment
-                attachment_data = response.json()[0]
+        # Upload file
+        result = await self.client.upload_file(
+            f"issue/{ticket_id}/attachments",
+            str(file_path_obj),
+            file_path_obj.name,
+        )
 
-
-
-
-
-
-
-
-
-
-
-
-
+        # JIRA returns array with single attachment
+        attachment_data = result[0]
+
+        return Attachment(
+            id=attachment_data["id"],
+            ticket_id=ticket_id,
+            filename=attachment_data["filename"],
+            url=attachment_data["content"],
+            content_type=attachment_data["mimeType"],
+            size_bytes=attachment_data["size"],
+            created_at=parse_jira_datetime(attachment_data["created"]),
+            created_by=attachment_data["author"]["displayName"],
+            description=description,
+            metadata={"jira": attachment_data},
+        )
 
     async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
         """Get all attachments for a JIRA issue.
@@ -1707,8 +1159,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         # Fetch issue with attachment field
-        issue = await self.
-
+        issue = await self.client.get(
+            f"issue/{ticket_id}", params={"fields": "attachment"}
         )
 
         attachments = []
@@ -1752,7 +1204,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             raise ValueError(error_message)
 
         try:
-            await self.
+            await self.client.delete(f"attachment/{attachment_id}")
             return True
         except HTTPStatusError as e:
             if e.response.status_code == 404:
@@ -1774,6 +1226,126 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         self._issue_types_cache.clear()
         self._custom_fields_cache.clear()
 
+    # Milestone Methods (Not yet implemented)
+
+    async def milestone_create(
+        self,
+        name: str,
+        target_date: datetime | None = None,
+        labels: list[str] | None = None,
+        description: str = "",
+        project_id: str | None = None,
+    ) -> Any:
+        """Create milestone - not yet implemented for Jira.
+
+        Args:
+        ----
+            name: Milestone name
+            target_date: Target completion date
+            labels: Labels that define this milestone
+            description: Milestone description
+            project_id: Associated project ID
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
+    async def milestone_get(self, milestone_id: str) -> Any:
+        """Get milestone - not yet implemented for Jira.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
+    async def milestone_list(
+        self,
+        project_id: str | None = None,
+        state: str | None = None,
+    ) -> list[Any]:
+        """List milestones - not yet implemented for Jira.
+
+        Args:
+        ----
+            project_id: Filter by project
+            state: Filter by state
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
+    async def milestone_update(
+        self,
+        milestone_id: str,
+        name: str | None = None,
+        target_date: datetime | None = None,
+        state: str | None = None,
+        labels: list[str] | None = None,
+        description: str | None = None,
+    ) -> Any:
+        """Update milestone - not yet implemented for Jira.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            name: New name
+            target_date: New target date
+            state: New state
+            labels: New labels
+            description: New description
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
+    async def milestone_delete(self, milestone_id: str) -> bool:
+        """Delete milestone - not yet implemented for Jira.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
+    async def milestone_get_issues(
+        self,
+        milestone_id: str,
+        state: str | None = None,
+    ) -> list[Any]:
+        """Get milestone issues - not yet implemented for Jira.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            state: Filter by issue state
+
+        Raises:
+        ------
+            NotImplementedError: Milestone support coming in v2.1.0
+
+        """
+        raise NotImplementedError("Milestone support for Jira coming in v2.1.0")
+
 
 # Register the adapter
 AdapterRegistry.register("jira", JiraAdapter)