mcp-ticketer 0.1.30__py3-none-any.whl → 1.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic. Click here for more details.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +796 -46
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1416 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github.py +879 -129
- mcp_ticketer/adapters/hybrid.py +11 -11
- mcp_ticketer/adapters/jira.py +973 -73
- mcp_ticketer/adapters/linear/__init__.py +24 -0
- mcp_ticketer/adapters/linear/adapter.py +2732 -0
- mcp_ticketer/adapters/linear/client.py +344 -0
- mcp_ticketer/adapters/linear/mappers.py +420 -0
- mcp_ticketer/adapters/linear/queries.py +479 -0
- mcp_ticketer/adapters/linear/types.py +360 -0
- mcp_ticketer/adapters/linear.py +10 -2315
- mcp_ticketer/analysis/__init__.py +23 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +421 -0
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +888 -151
- mcp_ticketer/cli/diagnostics.py +400 -157
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +616 -0
- mcp_ticketer/cli/main.py +203 -1165
- mcp_ticketer/cli/mcp_configure.py +474 -90
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +418 -0
- mcp_ticketer/cli/platform_installer.py +513 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +639 -0
- mcp_ticketer/cli/simple_health.py +90 -65
- mcp_ticketer/cli/ticket_commands.py +1013 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +114 -66
- mcp_ticketer/core/__init__.py +24 -1
- mcp_ticketer/core/adapter.py +250 -16
- mcp_ticketer/core/config.py +145 -37
- mcp_ticketer/core/env_discovery.py +101 -22
- mcp_ticketer/core/env_loader.py +349 -0
- mcp_ticketer/core/exceptions.py +160 -0
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/models.py +280 -28
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/project_config.py +183 -49
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +171 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +655 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +56 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +495 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +226 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +273 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1439 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +921 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +300 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +948 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +215 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +170 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1268 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +547 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +95 -25
- mcp_ticketer/queue/queue.py +40 -21
- mcp_ticketer/queue/run_worker.py +6 -1
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +109 -49
- mcp_ticketer-1.2.11.dist-info/METADATA +792 -0
- mcp_ticketer-1.2.11.dist-info/RECORD +110 -0
- mcp_ticketer/mcp/server.py +0 -1895
- mcp_ticketer-0.1.30.dist-info/METADATA +0 -413
- mcp_ticketer-0.1.30.dist-info/RECORD +0 -49
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/top_level.txt +0 -0
mcp_ticketer/adapters/jira.py
CHANGED
|
@@ -1,23 +1,103 @@
|
|
|
1
1
|
"""JIRA adapter implementation using REST API v3."""
|
|
2
2
|
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
3
5
|
import asyncio
|
|
4
6
|
import builtins
|
|
5
7
|
import logging
|
|
6
|
-
import
|
|
8
|
+
import re
|
|
7
9
|
from datetime import datetime
|
|
8
10
|
from enum import Enum
|
|
9
|
-
from typing import Any,
|
|
11
|
+
from typing import Any, Union
|
|
10
12
|
|
|
11
13
|
import httpx
|
|
12
14
|
from httpx import AsyncClient, HTTPStatusError, TimeoutException
|
|
13
15
|
|
|
14
16
|
from ..core.adapter import BaseAdapter
|
|
15
|
-
from ..core.
|
|
17
|
+
from ..core.env_loader import load_adapter_config, validate_adapter_config
|
|
18
|
+
from ..core.models import (
|
|
19
|
+
Attachment,
|
|
20
|
+
Comment,
|
|
21
|
+
Epic,
|
|
22
|
+
Priority,
|
|
23
|
+
SearchQuery,
|
|
24
|
+
Task,
|
|
25
|
+
TicketState,
|
|
26
|
+
)
|
|
16
27
|
from ..core.registry import AdapterRegistry
|
|
17
28
|
|
|
18
29
|
logger = logging.getLogger(__name__)
|
|
19
30
|
|
|
20
31
|
|
|
32
|
+
def parse_jira_datetime(date_str: str) -> datetime | None:
|
|
33
|
+
"""Parse JIRA datetime strings which can be in various formats.
|
|
34
|
+
|
|
35
|
+
JIRA can return dates in formats like:
|
|
36
|
+
- 2025-10-24T14:12:18.771-0400
|
|
37
|
+
- 2025-10-24T14:12:18.771Z
|
|
38
|
+
- 2025-10-24T14:12:18.771+00:00
|
|
39
|
+
"""
|
|
40
|
+
if not date_str:
|
|
41
|
+
return None
|
|
42
|
+
|
|
43
|
+
try:
|
|
44
|
+
# Handle Z timezone
|
|
45
|
+
if date_str.endswith("Z"):
|
|
46
|
+
return datetime.fromisoformat(date_str.replace("Z", "+00:00"))
|
|
47
|
+
|
|
48
|
+
# Handle timezone formats like -0400, +0500 (need to add colon)
|
|
49
|
+
if re.match(r".*[+-]\d{4}$", date_str):
|
|
50
|
+
# Insert colon in timezone: -0400 -> -04:00
|
|
51
|
+
date_str = re.sub(r"([+-]\d{2})(\d{2})$", r"\1:\2", date_str)
|
|
52
|
+
|
|
53
|
+
return datetime.fromisoformat(date_str)
|
|
54
|
+
|
|
55
|
+
except (ValueError, TypeError) as e:
|
|
56
|
+
logger.warning(f"Failed to parse JIRA datetime '{date_str}': {e}")
|
|
57
|
+
return None
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def extract_text_from_adf(adf_content: str | dict[str, Any]) -> str:
|
|
61
|
+
"""Extract plain text from Atlassian Document Format (ADF).
|
|
62
|
+
|
|
63
|
+
Args:
|
|
64
|
+
----
|
|
65
|
+
adf_content: Either a string (already plain text) or ADF document dict
|
|
66
|
+
|
|
67
|
+
Returns:
|
|
68
|
+
-------
|
|
69
|
+
Plain text string extracted from the ADF content
|
|
70
|
+
|
|
71
|
+
"""
|
|
72
|
+
if isinstance(adf_content, str):
|
|
73
|
+
return adf_content
|
|
74
|
+
|
|
75
|
+
if not isinstance(adf_content, dict):
|
|
76
|
+
return str(adf_content) if adf_content else ""
|
|
77
|
+
|
|
78
|
+
def extract_text_recursive(node: dict[str, Any]) -> str:
|
|
79
|
+
"""Recursively extract text from ADF nodes."""
|
|
80
|
+
if not isinstance(node, dict):
|
|
81
|
+
return ""
|
|
82
|
+
|
|
83
|
+
# If this is a text node, return its text
|
|
84
|
+
if node.get("type") == "text":
|
|
85
|
+
return node.get("text", "")
|
|
86
|
+
|
|
87
|
+
# If this node has content, process it recursively
|
|
88
|
+
content = node.get("content", [])
|
|
89
|
+
if isinstance(content, list):
|
|
90
|
+
return "".join(extract_text_recursive(child) for child in content)
|
|
91
|
+
|
|
92
|
+
return ""
|
|
93
|
+
|
|
94
|
+
try:
|
|
95
|
+
return extract_text_recursive(adf_content)
|
|
96
|
+
except Exception as e:
|
|
97
|
+
logger.warning(f"Failed to extract text from ADF: {e}")
|
|
98
|
+
return str(adf_content) if adf_content else ""
|
|
99
|
+
|
|
100
|
+
|
|
21
101
|
class JiraIssueType(str, Enum):
|
|
22
102
|
"""Common JIRA issue types."""
|
|
23
103
|
|
|
@@ -47,6 +127,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
47
127
|
"""Initialize JIRA adapter.
|
|
48
128
|
|
|
49
129
|
Args:
|
|
130
|
+
----
|
|
50
131
|
config: Configuration with:
|
|
51
132
|
- server: JIRA server URL (e.g., https://company.atlassian.net)
|
|
52
133
|
- email: User email for authentication
|
|
@@ -60,21 +141,25 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
60
141
|
"""
|
|
61
142
|
super().__init__(config)
|
|
62
143
|
|
|
63
|
-
#
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
self.timeout = config.get("timeout", 30)
|
|
73
|
-
self.max_retries = config.get("max_retries", 3)
|
|
144
|
+
# Load configuration with environment variable resolution
|
|
145
|
+
full_config = load_adapter_config("jira", config)
|
|
146
|
+
|
|
147
|
+
# Validate required configuration
|
|
148
|
+
missing_keys = validate_adapter_config("jira", full_config)
|
|
149
|
+
if missing_keys:
|
|
150
|
+
raise ValueError(
|
|
151
|
+
f"JIRA adapter missing required configuration: {', '.join(missing_keys)}"
|
|
152
|
+
)
|
|
74
153
|
|
|
75
|
-
#
|
|
76
|
-
|
|
77
|
-
|
|
154
|
+
# Configuration
|
|
155
|
+
self.server = full_config.get("server", "")
|
|
156
|
+
self.email = full_config.get("email", "")
|
|
157
|
+
self.api_token = full_config.get("api_token", "")
|
|
158
|
+
self.project_key = full_config.get("project_key", "")
|
|
159
|
+
self.is_cloud = full_config.get("cloud", True)
|
|
160
|
+
self.verify_ssl = full_config.get("verify_ssl", True)
|
|
161
|
+
self.timeout = full_config.get("timeout", 30)
|
|
162
|
+
self.max_retries = full_config.get("max_retries", 3)
|
|
78
163
|
|
|
79
164
|
# Clean up server URL
|
|
80
165
|
self.server = self.server.rstrip("/")
|
|
@@ -103,6 +188,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
103
188
|
"""Validate that required credentials are present.
|
|
104
189
|
|
|
105
190
|
Returns:
|
|
191
|
+
-------
|
|
106
192
|
(is_valid, error_message) - Tuple of validation result and error message
|
|
107
193
|
|
|
108
194
|
"""
|
|
@@ -149,13 +235,14 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
149
235
|
self,
|
|
150
236
|
method: str,
|
|
151
237
|
endpoint: str,
|
|
152
|
-
data:
|
|
153
|
-
params:
|
|
238
|
+
data: dict[str, Any] | None = None,
|
|
239
|
+
params: dict[str, Any] | None = None,
|
|
154
240
|
retry_count: int = 0,
|
|
155
241
|
) -> dict[str, Any]:
|
|
156
242
|
"""Make HTTP request to JIRA API with retry logic.
|
|
157
243
|
|
|
158
244
|
Args:
|
|
245
|
+
----
|
|
159
246
|
method: HTTP method
|
|
160
247
|
endpoint: API endpoint
|
|
161
248
|
data: Request body data
|
|
@@ -163,9 +250,11 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
163
250
|
retry_count: Current retry attempt
|
|
164
251
|
|
|
165
252
|
Returns:
|
|
253
|
+
-------
|
|
166
254
|
Response data
|
|
167
255
|
|
|
168
256
|
Raises:
|
|
257
|
+
------
|
|
169
258
|
HTTPStatusError: On API errors
|
|
170
259
|
TimeoutException: On timeout
|
|
171
260
|
|
|
@@ -215,7 +304,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
215
304
|
return self._priority_cache
|
|
216
305
|
|
|
217
306
|
async def _get_issue_types(
|
|
218
|
-
self, project_key:
|
|
307
|
+
self, project_key: str | None = None
|
|
219
308
|
) -> list[dict[str, Any]]:
|
|
220
309
|
"""Get available issue types for a project."""
|
|
221
310
|
key = project_key or self.project_key
|
|
@@ -308,9 +397,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
308
397
|
}
|
|
309
398
|
return mapping.get(priority, JiraPriority.MEDIUM)
|
|
310
399
|
|
|
311
|
-
def _map_priority_from_jira(
|
|
312
|
-
self, jira_priority: Optional[dict[str, Any]]
|
|
313
|
-
) -> Priority:
|
|
400
|
+
def _map_priority_from_jira(self, jira_priority: dict[str, Any] | None) -> Priority:
|
|
314
401
|
"""Map JIRA priority to universal priority."""
|
|
315
402
|
if not jira_priority:
|
|
316
403
|
return Priority.MEDIUM
|
|
@@ -360,7 +447,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
360
447
|
else:
|
|
361
448
|
return TicketState.OPEN
|
|
362
449
|
|
|
363
|
-
def _issue_to_ticket(self, issue: dict[str, Any]) ->
|
|
450
|
+
def _issue_to_ticket(self, issue: dict[str, Any]) -> Epic | Task:
|
|
364
451
|
"""Convert JIRA issue to universal ticket model."""
|
|
365
452
|
fields = issue.get("fields", {})
|
|
366
453
|
|
|
@@ -382,16 +469,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
382
469
|
label.get("name", "") if isinstance(label, dict) else str(label)
|
|
383
470
|
for label in fields.get("labels", [])
|
|
384
471
|
],
|
|
385
|
-
"created_at": (
|
|
386
|
-
|
|
387
|
-
if fields.get("created")
|
|
388
|
-
else None
|
|
389
|
-
),
|
|
390
|
-
"updated_at": (
|
|
391
|
-
datetime.fromisoformat(fields.get("updated", "").replace("Z", "+00:00"))
|
|
392
|
-
if fields.get("updated")
|
|
393
|
-
else None
|
|
394
|
-
),
|
|
472
|
+
"created_at": parse_jira_datetime(fields.get("created")),
|
|
473
|
+
"updated_at": parse_jira_datetime(fields.get("updated")),
|
|
395
474
|
"metadata": {
|
|
396
475
|
"jira": {
|
|
397
476
|
"id": issue.get("id"),
|
|
@@ -443,7 +522,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
443
522
|
)
|
|
444
523
|
|
|
445
524
|
def _ticket_to_issue_fields(
|
|
446
|
-
self, ticket:
|
|
525
|
+
self, ticket: Epic | Task, issue_type: str | None = None
|
|
447
526
|
) -> dict[str, Any]:
|
|
448
527
|
"""Convert universal ticket to JIRA issue fields."""
|
|
449
528
|
# Convert description to ADF format for JIRA Cloud
|
|
@@ -457,9 +536,12 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
457
536
|
"summary": ticket.title,
|
|
458
537
|
"description": description,
|
|
459
538
|
"labels": ticket.tags,
|
|
460
|
-
"priority": {"name": self._map_priority_to_jira(ticket.priority)},
|
|
461
539
|
}
|
|
462
540
|
|
|
541
|
+
# Only add priority for Tasks, not Epics (some JIRA configurations don't allow priority on Epics)
|
|
542
|
+
if isinstance(ticket, Task):
|
|
543
|
+
fields["priority"] = {"name": self._map_priority_to_jira(ticket.priority)}
|
|
544
|
+
|
|
463
545
|
# Add project if creating new issue
|
|
464
546
|
if not ticket.id and self.project_key:
|
|
465
547
|
fields["project"] = {"key": self.project_key}
|
|
@@ -489,7 +571,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
489
571
|
|
|
490
572
|
return fields
|
|
491
573
|
|
|
492
|
-
async def create(self, ticket:
|
|
574
|
+
async def create(self, ticket: Epic | Task) -> Epic | Task:
|
|
493
575
|
"""Create a new JIRA issue."""
|
|
494
576
|
# Validate credentials before attempting operation
|
|
495
577
|
is_valid, error_message = self.validate_credentials()
|
|
@@ -509,7 +591,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
509
591
|
created_issue = await self._make_request("GET", f"issue/{ticket.id}")
|
|
510
592
|
return self._issue_to_ticket(created_issue)
|
|
511
593
|
|
|
512
|
-
async def read(self, ticket_id: str) ->
|
|
594
|
+
async def read(self, ticket_id: str) -> Epic | Task | None:
|
|
513
595
|
"""Read a JIRA issue by key."""
|
|
514
596
|
# Validate credentials before attempting operation
|
|
515
597
|
is_valid, error_message = self.validate_credentials()
|
|
@@ -528,7 +610,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
528
610
|
|
|
529
611
|
async def update(
|
|
530
612
|
self, ticket_id: str, updates: dict[str, Any]
|
|
531
|
-
) ->
|
|
613
|
+
) -> Epic | Task | None:
|
|
532
614
|
"""Update a JIRA issue."""
|
|
533
615
|
# Validate credentials before attempting operation
|
|
534
616
|
is_valid, error_message = self.validate_credentials()
|
|
@@ -585,8 +667,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
585
667
|
raise
|
|
586
668
|
|
|
587
669
|
async def list(
|
|
588
|
-
self, limit: int = 10, offset: int = 0, filters:
|
|
589
|
-
) -> list[
|
|
670
|
+
self, limit: int = 10, offset: int = 0, filters: dict[str, Any] | None = None
|
|
671
|
+
) -> list[Epic | Task]:
|
|
590
672
|
"""List JIRA issues with pagination."""
|
|
591
673
|
# Build JQL query
|
|
592
674
|
jql_parts = []
|
|
@@ -608,16 +690,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
608
690
|
|
|
609
691
|
jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
|
|
610
692
|
|
|
611
|
-
# Search issues using the
|
|
693
|
+
# Search issues using the JIRA API endpoint
|
|
612
694
|
data = await self._make_request(
|
|
613
|
-
"
|
|
614
|
-
"search/jql", #
|
|
615
|
-
|
|
695
|
+
"GET",
|
|
696
|
+
"search/jql", # JIRA search endpoint (new API v3)
|
|
697
|
+
params={
|
|
616
698
|
"jql": jql,
|
|
617
699
|
"startAt": offset,
|
|
618
700
|
"maxResults": limit,
|
|
619
|
-
"fields":
|
|
620
|
-
"expand":
|
|
701
|
+
"fields": "*all",
|
|
702
|
+
"expand": "renderedFields",
|
|
621
703
|
},
|
|
622
704
|
)
|
|
623
705
|
|
|
@@ -625,7 +707,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
625
707
|
issues = data.get("issues", [])
|
|
626
708
|
return [self._issue_to_ticket(issue) for issue in issues]
|
|
627
709
|
|
|
628
|
-
async def search(self, query: SearchQuery) -> builtins.list[
|
|
710
|
+
async def search(self, query: SearchQuery) -> builtins.list[Epic | Task]:
|
|
629
711
|
"""Search JIRA issues using JQL."""
|
|
630
712
|
# Build JQL query
|
|
631
713
|
jql_parts = []
|
|
@@ -658,16 +740,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
658
740
|
|
|
659
741
|
jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
|
|
660
742
|
|
|
661
|
-
# Execute search using the
|
|
743
|
+
# Execute search using the JIRA API endpoint
|
|
662
744
|
data = await self._make_request(
|
|
663
|
-
"
|
|
664
|
-
"search/jql", #
|
|
665
|
-
|
|
745
|
+
"GET",
|
|
746
|
+
"search/jql", # JIRA search endpoint (new API v3)
|
|
747
|
+
params={
|
|
666
748
|
"jql": jql,
|
|
667
749
|
"startAt": query.offset,
|
|
668
750
|
"maxResults": query.limit,
|
|
669
|
-
"fields":
|
|
670
|
-
"expand":
|
|
751
|
+
"fields": "*all",
|
|
752
|
+
"expand": "renderedFields",
|
|
671
753
|
},
|
|
672
754
|
)
|
|
673
755
|
|
|
@@ -677,7 +759,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
677
759
|
|
|
678
760
|
async def transition_state(
|
|
679
761
|
self, ticket_id: str, target_state: TicketState
|
|
680
|
-
) ->
|
|
762
|
+
) -> Epic | Task | None:
|
|
681
763
|
"""Transition JIRA issue to a new state."""
|
|
682
764
|
# Get available transitions
|
|
683
765
|
transitions = await self._get_transitions(ticket_id)
|
|
@@ -728,8 +810,19 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
728
810
|
|
|
729
811
|
async def add_comment(self, comment: Comment) -> Comment:
|
|
730
812
|
"""Add a comment to a JIRA issue."""
|
|
731
|
-
# Prepare comment data
|
|
732
|
-
data = {
|
|
813
|
+
# Prepare comment data in Atlassian Document Format
|
|
814
|
+
data = {
|
|
815
|
+
"body": {
|
|
816
|
+
"type": "doc",
|
|
817
|
+
"version": 1,
|
|
818
|
+
"content": [
|
|
819
|
+
{
|
|
820
|
+
"type": "paragraph",
|
|
821
|
+
"content": [{"type": "text", "text": comment.content}],
|
|
822
|
+
}
|
|
823
|
+
],
|
|
824
|
+
}
|
|
825
|
+
}
|
|
733
826
|
|
|
734
827
|
# Add comment
|
|
735
828
|
result = await self._make_request(
|
|
@@ -739,9 +832,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
739
832
|
# Update comment with JIRA data
|
|
740
833
|
comment.id = result.get("id")
|
|
741
834
|
comment.created_at = (
|
|
742
|
-
|
|
743
|
-
if result.get("created")
|
|
744
|
-
else datetime.now()
|
|
835
|
+
parse_jira_datetime(result.get("created")) or datetime.now()
|
|
745
836
|
)
|
|
746
837
|
comment.author = result.get("author", {}).get("displayName", comment.author)
|
|
747
838
|
comment.metadata["jira"] = result
|
|
@@ -766,27 +857,23 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
766
857
|
# Convert to Comment objects
|
|
767
858
|
comments = []
|
|
768
859
|
for comment_data in paginated:
|
|
860
|
+
# Extract text content from ADF format
|
|
861
|
+
body_content = comment_data.get("body", "")
|
|
862
|
+
text_content = extract_text_from_adf(body_content)
|
|
863
|
+
|
|
769
864
|
comment = Comment(
|
|
770
865
|
id=comment_data.get("id"),
|
|
771
866
|
ticket_id=ticket_id,
|
|
772
867
|
author=comment_data.get("author", {}).get("displayName", "Unknown"),
|
|
773
|
-
content=
|
|
774
|
-
created_at=(
|
|
775
|
-
datetime.fromisoformat(
|
|
776
|
-
comment_data.get("created", "").replace("Z", "+00:00")
|
|
777
|
-
)
|
|
778
|
-
if comment_data.get("created")
|
|
779
|
-
else None
|
|
780
|
-
),
|
|
868
|
+
content=text_content,
|
|
869
|
+
created_at=parse_jira_datetime(comment_data.get("created")),
|
|
781
870
|
metadata={"jira": comment_data},
|
|
782
871
|
)
|
|
783
872
|
comments.append(comment)
|
|
784
873
|
|
|
785
874
|
return comments
|
|
786
875
|
|
|
787
|
-
async def get_project_info(
|
|
788
|
-
self, project_key: Optional[str] = None
|
|
789
|
-
) -> dict[str, Any]:
|
|
876
|
+
async def get_project_info(self, project_key: str | None = None) -> dict[str, Any]:
|
|
790
877
|
"""Get JIRA project information including workflows and fields."""
|
|
791
878
|
key = project_key or self.project_key
|
|
792
879
|
if not key:
|
|
@@ -808,14 +895,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
808
895
|
|
|
809
896
|
async def execute_jql(
|
|
810
897
|
self, jql: str, limit: int = 50
|
|
811
|
-
) -> builtins.list[
|
|
898
|
+
) -> builtins.list[Epic | Task]:
|
|
812
899
|
"""Execute a raw JQL query.
|
|
813
900
|
|
|
814
901
|
Args:
|
|
902
|
+
----
|
|
815
903
|
jql: JIRA Query Language string
|
|
816
904
|
limit: Maximum number of results
|
|
817
905
|
|
|
818
906
|
Returns:
|
|
907
|
+
-------
|
|
819
908
|
List of matching tickets
|
|
820
909
|
|
|
821
910
|
"""
|
|
@@ -834,14 +923,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
834
923
|
return [self._issue_to_ticket(issue) for issue in issues]
|
|
835
924
|
|
|
836
925
|
async def get_sprints(
|
|
837
|
-
self, board_id:
|
|
926
|
+
self, board_id: int | None = None
|
|
838
927
|
) -> builtins.list[dict[str, Any]]:
|
|
839
928
|
"""Get active sprints for a board (requires JIRA Software).
|
|
840
929
|
|
|
841
930
|
Args:
|
|
931
|
+
----
|
|
842
932
|
board_id: Agile board ID
|
|
843
933
|
|
|
844
934
|
Returns:
|
|
935
|
+
-------
|
|
845
936
|
List of sprint information
|
|
846
937
|
|
|
847
938
|
"""
|
|
@@ -866,6 +957,815 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
|
|
|
866
957
|
|
|
867
958
|
return sprints_data.get("values", [])
|
|
868
959
|
|
|
960
|
+
async def get_project_users(self) -> builtins.list[dict[str, Any]]:
|
|
961
|
+
"""Get users who have access to the project."""
|
|
962
|
+
if not self.project_key:
|
|
963
|
+
return []
|
|
964
|
+
|
|
965
|
+
try:
|
|
966
|
+
# Get project role users
|
|
967
|
+
project_data = await self._make_request(
|
|
968
|
+
"GET", f"project/{self.project_key}"
|
|
969
|
+
)
|
|
970
|
+
|
|
971
|
+
# Get users from project roles
|
|
972
|
+
users = []
|
|
973
|
+
if "roles" in project_data:
|
|
974
|
+
for _role_name, role_url in project_data["roles"].items():
|
|
975
|
+
# Extract role ID from URL
|
|
976
|
+
role_id = role_url.split("/")[-1]
|
|
977
|
+
try:
|
|
978
|
+
role_data = await self._make_request(
|
|
979
|
+
"GET", f"project/{self.project_key}/role/{role_id}"
|
|
980
|
+
)
|
|
981
|
+
if "actors" in role_data:
|
|
982
|
+
for actor in role_data["actors"]:
|
|
983
|
+
if actor.get("type") == "atlassian-user-role-actor":
|
|
984
|
+
users.append(actor.get("actorUser", {}))
|
|
985
|
+
except Exception:
|
|
986
|
+
# Skip if role access fails
|
|
987
|
+
continue
|
|
988
|
+
|
|
989
|
+
# Remove duplicates based on accountId
|
|
990
|
+
seen_ids = set()
|
|
991
|
+
unique_users = []
|
|
992
|
+
for user in users:
|
|
993
|
+
account_id = user.get("accountId")
|
|
994
|
+
if account_id and account_id not in seen_ids:
|
|
995
|
+
seen_ids.add(account_id)
|
|
996
|
+
unique_users.append(user)
|
|
997
|
+
|
|
998
|
+
return unique_users
|
|
999
|
+
|
|
1000
|
+
except Exception:
|
|
1001
|
+
# Fallback: try to get assignable users for the project
|
|
1002
|
+
try:
|
|
1003
|
+
users_data = await self._make_request(
|
|
1004
|
+
"GET",
|
|
1005
|
+
"user/assignable/search",
|
|
1006
|
+
params={"project": self.project_key, "maxResults": 50},
|
|
1007
|
+
)
|
|
1008
|
+
return users_data if isinstance(users_data, list) else []
|
|
1009
|
+
except Exception:
|
|
1010
|
+
return []
|
|
1011
|
+
|
|
1012
|
+
async def get_current_user(self) -> dict[str, Any] | None:
|
|
1013
|
+
"""Get current authenticated user information."""
|
|
1014
|
+
try:
|
|
1015
|
+
return await self._make_request("GET", "myself")
|
|
1016
|
+
except Exception:
|
|
1017
|
+
return None
|
|
1018
|
+
|
|
1019
|
+
async def list_labels(self) -> builtins.list[dict[str, Any]]:
|
|
1020
|
+
"""List all labels used in the project.
|
|
1021
|
+
|
|
1022
|
+
JIRA doesn't have a direct "list all labels" endpoint, so we query
|
|
1023
|
+
recent issues and extract unique labels from them.
|
|
1024
|
+
|
|
1025
|
+
Returns:
|
|
1026
|
+
-------
|
|
1027
|
+
List of label dictionaries with 'id' and 'name' fields
|
|
1028
|
+
|
|
1029
|
+
"""
|
|
1030
|
+
try:
|
|
1031
|
+
# Query recent issues to get labels in use
|
|
1032
|
+
jql = f"project = {self.project_key} ORDER BY updated DESC"
|
|
1033
|
+
data = await self._make_request(
|
|
1034
|
+
"GET",
|
|
1035
|
+
"search/jql",
|
|
1036
|
+
params={
|
|
1037
|
+
"jql": jql,
|
|
1038
|
+
"maxResults": 100, # Sample from recent 100 issues
|
|
1039
|
+
"fields": "labels",
|
|
1040
|
+
},
|
|
1041
|
+
)
|
|
1042
|
+
|
|
1043
|
+
# Collect unique labels
|
|
1044
|
+
unique_labels = set()
|
|
1045
|
+
for issue in data.get("issues", []):
|
|
1046
|
+
labels = issue.get("fields", {}).get("labels", [])
|
|
1047
|
+
for label in labels:
|
|
1048
|
+
if isinstance(label, dict):
|
|
1049
|
+
unique_labels.add(label.get("name", ""))
|
|
1050
|
+
else:
|
|
1051
|
+
unique_labels.add(str(label))
|
|
1052
|
+
|
|
1053
|
+
# Transform to standardized format
|
|
1054
|
+
return [
|
|
1055
|
+
{"id": label, "name": label} for label in sorted(unique_labels) if label
|
|
1056
|
+
]
|
|
1057
|
+
|
|
1058
|
+
except Exception:
|
|
1059
|
+
# Fallback: return empty list if query fails
|
|
1060
|
+
return []
|
|
1061
|
+
|
|
1062
|
+
async def create_issue_label(
|
|
1063
|
+
self, name: str, color: str | None = None
|
|
1064
|
+
) -> dict[str, Any]:
|
|
1065
|
+
"""Create a new issue label in JIRA.
|
|
1066
|
+
|
|
1067
|
+
Note: JIRA doesn't have a dedicated label creation API. Labels are
|
|
1068
|
+
created automatically when first used on an issue. This method
|
|
1069
|
+
validates the label name and returns a success response.
|
|
1070
|
+
|
|
1071
|
+
Args:
|
|
1072
|
+
----
|
|
1073
|
+
name: Label name to create
|
|
1074
|
+
color: Optional color (JIRA doesn't support colors natively, ignored)
|
|
1075
|
+
|
|
1076
|
+
Returns:
|
|
1077
|
+
-------
|
|
1078
|
+
Dict with label details:
|
|
1079
|
+
- id: Label name (same as name in JIRA)
|
|
1080
|
+
- name: Label name
|
|
1081
|
+
- status: "ready" indicating the label can be used
|
|
1082
|
+
|
|
1083
|
+
Raises:
|
|
1084
|
+
------
|
|
1085
|
+
ValueError: If credentials are invalid or label name is invalid
|
|
1086
|
+
|
|
1087
|
+
"""
|
|
1088
|
+
# Validate credentials before attempting operation
|
|
1089
|
+
is_valid, error_message = self.validate_credentials()
|
|
1090
|
+
if not is_valid:
|
|
1091
|
+
raise ValueError(error_message)
|
|
1092
|
+
|
|
1093
|
+
# Validate label name
|
|
1094
|
+
if not name or not name.strip():
|
|
1095
|
+
raise ValueError("Label name cannot be empty")
|
|
1096
|
+
|
|
1097
|
+
# JIRA label names must not contain spaces
|
|
1098
|
+
if " " in name:
|
|
1099
|
+
raise ValueError(
|
|
1100
|
+
"JIRA label names cannot contain spaces. Use underscores or hyphens instead."
|
|
1101
|
+
)
|
|
1102
|
+
|
|
1103
|
+
# Return success response
|
|
1104
|
+
# The label will be created automatically when first used on an issue
|
|
1105
|
+
return {"id": name, "name": name, "status": "ready"}
|
|
1106
|
+
|
|
1107
|
+
async def list_project_labels(
|
|
1108
|
+
self, project_key: str | None = None, limit: int = 100
|
|
1109
|
+
) -> builtins.list[dict[str, Any]]:
|
|
1110
|
+
"""List all labels used in a JIRA project.
|
|
1111
|
+
|
|
1112
|
+
JIRA doesn't have a dedicated endpoint for listing project labels.
|
|
1113
|
+
This method queries recent issues and extracts unique labels.
|
|
1114
|
+
|
|
1115
|
+
Args:
|
|
1116
|
+
----
|
|
1117
|
+
project_key: JIRA project key (e.g., 'PROJ'). If None, uses configured project.
|
|
1118
|
+
limit: Maximum number of labels to return (default: 100)
|
|
1119
|
+
|
|
1120
|
+
Returns:
|
|
1121
|
+
-------
|
|
1122
|
+
List of label dictionaries with 'id', 'name', and 'usage_count' fields
|
|
1123
|
+
|
|
1124
|
+
Raises:
|
|
1125
|
+
------
|
|
1126
|
+
ValueError: If credentials are invalid or project key not available
|
|
1127
|
+
|
|
1128
|
+
"""
|
|
1129
|
+
# Validate credentials before attempting operation
|
|
1130
|
+
is_valid, error_message = self.validate_credentials()
|
|
1131
|
+
if not is_valid:
|
|
1132
|
+
raise ValueError(error_message)
|
|
1133
|
+
|
|
1134
|
+
# Use configured project if not specified
|
|
1135
|
+
key = project_key or self.project_key
|
|
1136
|
+
if not key:
|
|
1137
|
+
raise ValueError("Project key is required")
|
|
1138
|
+
|
|
1139
|
+
try:
|
|
1140
|
+
# Query recent issues to get labels in use
|
|
1141
|
+
jql = f"project = {key} ORDER BY updated DESC"
|
|
1142
|
+
data = await self._make_request(
|
|
1143
|
+
"GET",
|
|
1144
|
+
"search/jql",
|
|
1145
|
+
params={
|
|
1146
|
+
"jql": jql,
|
|
1147
|
+
"maxResults": 500, # Sample from more issues for better coverage
|
|
1148
|
+
"fields": "labels",
|
|
1149
|
+
},
|
|
1150
|
+
)
|
|
1151
|
+
|
|
1152
|
+
# Collect labels with usage count
|
|
1153
|
+
label_counts: dict[str, int] = {}
|
|
1154
|
+
for issue in data.get("issues", []):
|
|
1155
|
+
labels = issue.get("fields", {}).get("labels", [])
|
|
1156
|
+
for label in labels:
|
|
1157
|
+
label_name = (
|
|
1158
|
+
label.get("name", "") if isinstance(label, dict) else str(label)
|
|
1159
|
+
)
|
|
1160
|
+
if label_name:
|
|
1161
|
+
label_counts[label_name] = label_counts.get(label_name, 0) + 1
|
|
1162
|
+
|
|
1163
|
+
# Transform to standardized format with usage counts
|
|
1164
|
+
result = [
|
|
1165
|
+
{"id": label, "name": label, "usage_count": count}
|
|
1166
|
+
for label, count in sorted(
|
|
1167
|
+
label_counts.items(), key=lambda x: x[1], reverse=True
|
|
1168
|
+
)
|
|
1169
|
+
]
|
|
1170
|
+
|
|
1171
|
+
return result[:limit]
|
|
1172
|
+
|
|
1173
|
+
except Exception as e:
|
|
1174
|
+
logger.error(f"Failed to list project labels: {e}")
|
|
1175
|
+
raise ValueError(f"Failed to list project labels: {e}") from e
|
|
1176
|
+
|
|
1177
|
+
    async def list_cycles(
        self, board_id: str | None = None, state: str | None = None, limit: int = 50
    ) -> builtins.list[dict[str, Any]]:
        """List JIRA sprints (cycles) for a board.

        Requires JIRA Agile/Software. Falls back to empty list if not available.

        Args:
        ----
            board_id: JIRA Agile board ID. If None, finds first board for project.
            state: Filter by state ('active', 'closed', 'future'). If None, returns all.
            limit: Maximum number of sprints to return (default: 50)

        Returns:
        -------
            List of sprint dictionaries with fields:
            - id: Sprint ID
            - name: Sprint name
            - state: Sprint state (active, closed, future)
            - startDate: Start date (ISO format)
            - endDate: End date (ISO format)
            - completeDate: Completion date (ISO format, None if not completed)
            - goal: Sprint goal

        Raises:
        ------
            ValueError: If credentials are invalid

        """
        # Validate credentials before attempting operation
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        try:
            # If no board_id provided, try to find a board for the project.
            # Only the first board is fetched (maxResults=1) — projects with
            # multiple boards always resolve to the first one returned.
            if not board_id:
                boards_data = await self._make_request(
                    "GET",
                    "/rest/agile/1.0/board",
                    params={"projectKeyOrId": self.project_key, "maxResults": 1},
                )
                boards = boards_data.get("values", [])
                if not boards:
                    logger.warning(
                        f"No Agile boards found for project {self.project_key}"
                    )
                    return []
                board_id = str(boards[0]["id"])

            # Get sprints for the board; "state" is only sent when the caller
            # asked for a filter, otherwise JIRA returns all sprint states.
            params: dict[str, Any] = {"maxResults": limit}
            if state:
                params["state"] = state

            sprints_data = await self._make_request(
                "GET", f"/rest/agile/1.0/board/{board_id}/sprint", params=params
            )

            sprints = sprints_data.get("values", [])

            # Transform to standardized format
            return [
                {
                    "id": sprint.get("id"),
                    "name": sprint.get("name"),
                    "state": sprint.get("state"),
                    "startDate": sprint.get("startDate"),
                    "endDate": sprint.get("endDate"),
                    "completeDate": sprint.get("completeDate"),
                    "goal": sprint.get("goal", ""),
                }
                for sprint in sprints
            ]

        except HTTPStatusError as e:
            # 404 means the Agile API is simply not installed/licensed —
            # treat as "no sprints" rather than an error.
            if e.response.status_code == 404:
                logger.warning("JIRA Agile API not available (404)")
                return []
            logger.error(f"Failed to list sprints: {e}")
            raise ValueError(f"Failed to list sprints: {e}") from e
        except Exception as e:
            # Any other failure is assumed to be a missing Agile feature;
            # degrade to an empty list per the docstring contract.
            logger.warning(f"JIRA Agile may not be available: {e}")
            return []
async def list_issue_statuses(
|
|
1263
|
+
self, project_key: str | None = None
|
|
1264
|
+
) -> builtins.list[dict[str, Any]]:
|
|
1265
|
+
"""List all workflow statuses in JIRA.
|
|
1266
|
+
|
|
1267
|
+
Args:
|
|
1268
|
+
----
|
|
1269
|
+
project_key: Optional project key to filter statuses.
|
|
1270
|
+
If None, returns all statuses.
|
|
1271
|
+
|
|
1272
|
+
Returns:
|
|
1273
|
+
-------
|
|
1274
|
+
List of status dictionaries with fields:
|
|
1275
|
+
- id: Status ID
|
|
1276
|
+
- name: Status name (e.g., "To Do", "In Progress", "Done")
|
|
1277
|
+
- category: Status category key (e.g., "new", "indeterminate", "done")
|
|
1278
|
+
- categoryName: Human-readable category name
|
|
1279
|
+
- description: Status description
|
|
1280
|
+
|
|
1281
|
+
Raises:
|
|
1282
|
+
------
|
|
1283
|
+
ValueError: If credentials are invalid
|
|
1284
|
+
|
|
1285
|
+
"""
|
|
1286
|
+
# Validate credentials before attempting operation
|
|
1287
|
+
is_valid, error_message = self.validate_credentials()
|
|
1288
|
+
if not is_valid:
|
|
1289
|
+
raise ValueError(error_message)
|
|
1290
|
+
|
|
1291
|
+
try:
|
|
1292
|
+
# Use project-specific statuses if project key provided
|
|
1293
|
+
if project_key:
|
|
1294
|
+
# Get statuses for the project
|
|
1295
|
+
data = await self._make_request(
|
|
1296
|
+
"GET", f"project/{project_key}/statuses"
|
|
1297
|
+
)
|
|
1298
|
+
|
|
1299
|
+
# Extract unique statuses from all issue types
|
|
1300
|
+
status_map: dict[str, dict[str, Any]] = {}
|
|
1301
|
+
for issue_type_data in data:
|
|
1302
|
+
for status in issue_type_data.get("statuses", []):
|
|
1303
|
+
status_id = status.get("id")
|
|
1304
|
+
if status_id not in status_map:
|
|
1305
|
+
status_map[status_id] = status
|
|
1306
|
+
|
|
1307
|
+
statuses = list(status_map.values())
|
|
1308
|
+
else:
|
|
1309
|
+
# Get all statuses
|
|
1310
|
+
statuses = await self._make_request("GET", "status")
|
|
1311
|
+
|
|
1312
|
+
# Transform to standardized format
|
|
1313
|
+
return [
|
|
1314
|
+
{
|
|
1315
|
+
"id": status.get("id"),
|
|
1316
|
+
"name": status.get("name"),
|
|
1317
|
+
"category": status.get("statusCategory", {}).get("key", ""),
|
|
1318
|
+
"categoryName": status.get("statusCategory", {}).get("name", ""),
|
|
1319
|
+
"description": status.get("description", ""),
|
|
1320
|
+
}
|
|
1321
|
+
for status in statuses
|
|
1322
|
+
]
|
|
1323
|
+
|
|
1324
|
+
except Exception as e:
|
|
1325
|
+
logger.error(f"Failed to list issue statuses: {e}")
|
|
1326
|
+
raise ValueError(f"Failed to list issue statuses: {e}") from e
|
|
1327
|
+
|
|
1328
|
+
    async def get_issue_status(self, issue_key: str) -> dict[str, Any] | None:
        """Get rich status information for an issue.

        Args:
        ----
            issue_key: JIRA issue key (e.g., 'PROJ-123')

        Returns:
        -------
            Dict with status details and available transitions:
            - id: Status ID
            - name: Status name
            - category: Status category key
            - categoryName: Human-readable category name
            - description: Status description
            - transitions: List of available transitions with:
                - id: Transition ID
                - name: Transition name
                - to: Target status info (id, name, category)
            Returns None if issue not found.

        Raises:
        ------
            ValueError: If credentials are invalid

        """
        # Validate credentials before attempting operation
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        try:
            # Get issue with status field only (cheaper than full issue fetch)
            issue = await self._make_request(
                "GET", f"issue/{issue_key}", params={"fields": "status"}
            )

            if not issue:
                return None

            status = issue.get("fields", {}).get("status", {})

            # Get available transitions (second round-trip; JIRA exposes
            # them on a separate endpoint)
            transitions_data = await self._make_request(
                "GET", f"issue/{issue_key}/transitions"
            )
            transitions = transitions_data.get("transitions", [])

            # Transform transitions to simplified format
            transition_list = [
                {
                    "id": trans.get("id"),
                    "name": trans.get("name"),
                    "to": {
                        "id": trans.get("to", {}).get("id"),
                        "name": trans.get("to", {}).get("name"),
                        "category": trans.get("to", {})
                        .get("statusCategory", {})
                        .get("key", ""),
                    },
                }
                for trans in transitions
            ]

            return {
                "id": status.get("id"),
                "name": status.get("name"),
                "category": status.get("statusCategory", {}).get("key", ""),
                "categoryName": status.get("statusCategory", {}).get("name", ""),
                "description": status.get("description", ""),
                "transitions": transition_list,
            }

        except HTTPStatusError as e:
            # 404 maps to "issue not found" → None, matching the docstring
            if e.response.status_code == 404:
                return None
            logger.error(f"Failed to get issue status: {e}")
            raise ValueError(f"Failed to get issue status: {e}") from e
        except Exception as e:
            logger.error(f"Failed to get issue status: {e}")
            raise ValueError(f"Failed to get issue status: {e}") from e
async def create_epic(
|
|
1411
|
+
self,
|
|
1412
|
+
title: str,
|
|
1413
|
+
description: str = "",
|
|
1414
|
+
priority: Priority = Priority.MEDIUM,
|
|
1415
|
+
tags: list[str] | None = None,
|
|
1416
|
+
**kwargs: Any,
|
|
1417
|
+
) -> Epic:
|
|
1418
|
+
"""Create a new JIRA Epic.
|
|
1419
|
+
|
|
1420
|
+
Args:
|
|
1421
|
+
----
|
|
1422
|
+
title: Epic title
|
|
1423
|
+
description: Epic description
|
|
1424
|
+
priority: Priority level
|
|
1425
|
+
tags: List of labels
|
|
1426
|
+
**kwargs: Additional fields (reserved for future use)
|
|
1427
|
+
|
|
1428
|
+
Returns:
|
|
1429
|
+
-------
|
|
1430
|
+
Created Epic with ID populated
|
|
1431
|
+
|
|
1432
|
+
Raises:
|
|
1433
|
+
------
|
|
1434
|
+
ValueError: If credentials are invalid or creation fails
|
|
1435
|
+
|
|
1436
|
+
"""
|
|
1437
|
+
# Validate credentials
|
|
1438
|
+
is_valid, error_message = self.validate_credentials()
|
|
1439
|
+
if not is_valid:
|
|
1440
|
+
raise ValueError(error_message)
|
|
1441
|
+
|
|
1442
|
+
# Build epic input
|
|
1443
|
+
epic = Epic(
|
|
1444
|
+
id="", # Will be populated by JIRA
|
|
1445
|
+
title=title,
|
|
1446
|
+
description=description,
|
|
1447
|
+
priority=priority,
|
|
1448
|
+
tags=tags or [],
|
|
1449
|
+
state=TicketState.OPEN,
|
|
1450
|
+
)
|
|
1451
|
+
|
|
1452
|
+
# Create using base create method with Epic type
|
|
1453
|
+
created_epic = await self.create(epic)
|
|
1454
|
+
|
|
1455
|
+
if not isinstance(created_epic, Epic):
|
|
1456
|
+
raise ValueError("Created ticket is not an Epic")
|
|
1457
|
+
|
|
1458
|
+
return created_epic
|
|
1459
|
+
|
|
1460
|
+
async def get_epic(self, epic_id: str) -> Epic | None:
|
|
1461
|
+
"""Get a JIRA Epic by key or ID.
|
|
1462
|
+
|
|
1463
|
+
Args:
|
|
1464
|
+
----
|
|
1465
|
+
epic_id: Epic identifier (key like PROJ-123)
|
|
1466
|
+
|
|
1467
|
+
Returns:
|
|
1468
|
+
-------
|
|
1469
|
+
Epic object if found and is an Epic type, None otherwise
|
|
1470
|
+
|
|
1471
|
+
Raises:
|
|
1472
|
+
------
|
|
1473
|
+
ValueError: If credentials are invalid
|
|
1474
|
+
|
|
1475
|
+
"""
|
|
1476
|
+
# Validate credentials
|
|
1477
|
+
is_valid, error_message = self.validate_credentials()
|
|
1478
|
+
if not is_valid:
|
|
1479
|
+
raise ValueError(error_message)
|
|
1480
|
+
|
|
1481
|
+
# Read issue
|
|
1482
|
+
ticket = await self.read(epic_id)
|
|
1483
|
+
|
|
1484
|
+
if not ticket:
|
|
1485
|
+
return None
|
|
1486
|
+
|
|
1487
|
+
# Verify it's an Epic
|
|
1488
|
+
if not isinstance(ticket, Epic):
|
|
1489
|
+
return None
|
|
1490
|
+
|
|
1491
|
+
return ticket
|
|
1492
|
+
|
|
1493
|
+
    async def list_epics(
        self, limit: int = 50, offset: int = 0, state: str | None = None, **kwargs: Any
    ) -> builtins.list[Epic]:
        """List JIRA Epics with pagination.

        Args:
        ----
            limit: Maximum number of epics to return (default: 50)
            offset: Number of epics to skip for pagination (default: 0)
            state: Filter by state/status name (e.g., "To Do", "In Progress", "Done")
            **kwargs: Additional filter parameters (reserved for future use)

        Returns:
        -------
            List of Epic objects

        Raises:
        ------
            ValueError: If credentials are invalid or query fails

        """
        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        # Build JQL query for epics in the configured project
        jql_parts = [f"project = {self.project_key}", 'issuetype = "Epic"']

        # Add state filter if provided (matched against JIRA status name)
        if state:
            jql_parts.append(f'status = "{state}"')

        jql = " AND ".join(jql_parts) + " ORDER BY updated DESC"

        try:
            # Execute search; *all fields + rendered fields so the ticket
            # mapper has everything it needs
            data = await self._make_request(
                "GET",
                "search/jql",
                params={
                    "jql": jql,
                    "startAt": offset,
                    "maxResults": limit,
                    "fields": "*all",
                    "expand": "renderedFields",
                },
            )

            # Convert issues to tickets
            issues = data.get("issues", [])
            epics: builtins.list[Epic] = []

            for issue in issues:
                ticket = self._issue_to_ticket(issue)
                # Only include if it's actually an Epic — the JQL already
                # filters by issuetype, but the mapper decides the final type
                if isinstance(ticket, Epic):
                    epics.append(ticket)

            return epics

        except Exception as e:
            raise ValueError(f"Failed to list JIRA epics: {e}") from e
    async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
        """Update a JIRA Epic with epic-specific field handling.

        Args:
        ----
            epic_id: Epic identifier (key like PROJ-123 or ID)
            updates: Dictionary with fields to update:
                - title: Epic title (maps to summary)
                - description: Epic description (auto-converted to ADF)
                - state: TicketState value (transitions via workflow)
                - tags: List of labels
                - priority: Priority level

        Returns:
        -------
            Updated Epic object or None if not found

        Raises:
        ------
            ValueError: If no fields provided for update
            HTTPStatusError: If update fails

        """
        # Accumulate JIRA field payload; each mapping below translates one
        # adapter-level key into JIRA's REST field shape.
        fields: dict[str, Any] = {}

        # Map title to summary
        if "title" in updates:
            fields["summary"] = updates["title"]

        # Convert description to ADF format (JIRA Cloud requires ADF bodies)
        if "description" in updates:
            fields["description"] = self._convert_to_adf(updates["description"])

        # Map tags to labels
        if "tags" in updates:
            fields["labels"] = updates["tags"]

        # Map priority (some JIRA configs allow priority on Epics)
        if "priority" in updates:
            priority_value = updates["priority"]
            if isinstance(priority_value, Priority):
                fields["priority"] = {
                    "name": self._map_priority_to_jira(priority_value)
                }
            else:
                # String priority passed directly
                fields["priority"] = {"name": priority_value}

        # A lone "state" update is valid even though it produces no fields —
        # it goes through the transition endpoint instead.
        if not fields and "state" not in updates:
            raise ValueError("At least one field must be updated")

        # Apply field updates if any
        if fields:
            await self._make_request("PUT", f"issue/{epic_id}", data={"fields": fields})

        # Handle state transitions separately (JIRA uses workflow transitions,
        # not a writable status field)
        if "state" in updates:
            await self.transition_state(epic_id, updates["state"])

        # Fetch and return updated epic
        # NOTE(review): self.read returns a generic ticket — presumably an
        # Epic here since epic_id targets an Epic; confirm against read().
        return await self.read(epic_id)
    async def add_attachment(
        self, ticket_id: str, file_path: str, description: str | None = None
    ) -> Attachment:
        """Attach file to JIRA issue (including Epic).

        Args:
        ----
            ticket_id: Issue key (e.g., PROJ-123) or ID
            file_path: Path to file to attach
            description: Optional description (stored in metadata, not used by JIRA directly)

        Returns:
        -------
            Attachment object with metadata

        Raises:
        ------
            FileNotFoundError: If file doesn't exist
            ValueError: If credentials invalid
            HTTPStatusError: If upload fails

        """
        from pathlib import Path

        # Validate credentials before attempting operation
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        file_path_obj = Path(file_path)
        if not file_path_obj.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        # JIRA requires special header for attachment upload (XSRF bypass)
        headers = {
            "X-Atlassian-Token": "no-check",
            # Don't set Content-Type - let httpx handle multipart
        }

        # Prepare multipart file upload; the file handle must stay open for
        # the duration of the POST, hence the upload lives inside this block
        with open(file_path_obj, "rb") as f:
            files = {"file": (file_path_obj.name, f, "application/octet-stream")}

            url = f"{self.api_base}/issue/{ticket_id}/attachments"

            # Use existing client infrastructure; per-request headers are
            # merged over the adapter's default auth headers
            async with await self._get_client() as client:
                response = await client.post(
                    url, files=files, headers={**self.headers, **headers}
                )
                response.raise_for_status()

                # JIRA returns array with single attachment
                attachment_data = response.json()[0]

        return Attachment(
            id=attachment_data["id"],
            ticket_id=ticket_id,
            filename=attachment_data["filename"],
            url=attachment_data["content"],
            content_type=attachment_data["mimeType"],
            size_bytes=attachment_data["size"],
            created_at=parse_jira_datetime(attachment_data["created"]),
            created_by=attachment_data["author"]["displayName"],
            description=description,
            metadata={"jira": attachment_data},
        )
async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
|
|
1688
|
+
"""Get all attachments for a JIRA issue.
|
|
1689
|
+
|
|
1690
|
+
Args:
|
|
1691
|
+
----
|
|
1692
|
+
ticket_id: Issue key or ID
|
|
1693
|
+
|
|
1694
|
+
Returns:
|
|
1695
|
+
-------
|
|
1696
|
+
List of Attachment objects
|
|
1697
|
+
|
|
1698
|
+
Raises:
|
|
1699
|
+
------
|
|
1700
|
+
ValueError: If credentials invalid
|
|
1701
|
+
HTTPStatusError: If request fails
|
|
1702
|
+
|
|
1703
|
+
"""
|
|
1704
|
+
# Validate credentials before attempting operation
|
|
1705
|
+
is_valid, error_message = self.validate_credentials()
|
|
1706
|
+
if not is_valid:
|
|
1707
|
+
raise ValueError(error_message)
|
|
1708
|
+
|
|
1709
|
+
# Fetch issue with attachment field
|
|
1710
|
+
issue = await self._make_request(
|
|
1711
|
+
"GET", f"issue/{ticket_id}", params={"fields": "attachment"}
|
|
1712
|
+
)
|
|
1713
|
+
|
|
1714
|
+
attachments = []
|
|
1715
|
+
for att_data in issue.get("fields", {}).get("attachment", []):
|
|
1716
|
+
attachments.append(
|
|
1717
|
+
Attachment(
|
|
1718
|
+
id=att_data["id"],
|
|
1719
|
+
ticket_id=ticket_id,
|
|
1720
|
+
filename=att_data["filename"],
|
|
1721
|
+
url=att_data["content"],
|
|
1722
|
+
content_type=att_data["mimeType"],
|
|
1723
|
+
size_bytes=att_data["size"],
|
|
1724
|
+
created_at=parse_jira_datetime(att_data["created"]),
|
|
1725
|
+
created_by=att_data["author"]["displayName"],
|
|
1726
|
+
metadata={"jira": att_data},
|
|
1727
|
+
)
|
|
1728
|
+
)
|
|
1729
|
+
|
|
1730
|
+
return attachments
|
|
1731
|
+
|
|
1732
|
+
async def delete_attachment(self, ticket_id: str, attachment_id: str) -> bool:
|
|
1733
|
+
"""Delete an attachment from a JIRA issue.
|
|
1734
|
+
|
|
1735
|
+
Args:
|
|
1736
|
+
----
|
|
1737
|
+
ticket_id: Issue key or ID (for validation/context)
|
|
1738
|
+
attachment_id: Attachment ID to delete
|
|
1739
|
+
|
|
1740
|
+
Returns:
|
|
1741
|
+
-------
|
|
1742
|
+
True if deleted successfully, False otherwise
|
|
1743
|
+
|
|
1744
|
+
Raises:
|
|
1745
|
+
------
|
|
1746
|
+
ValueError: If credentials invalid
|
|
1747
|
+
|
|
1748
|
+
"""
|
|
1749
|
+
# Validate credentials before attempting operation
|
|
1750
|
+
is_valid, error_message = self.validate_credentials()
|
|
1751
|
+
if not is_valid:
|
|
1752
|
+
raise ValueError(error_message)
|
|
1753
|
+
|
|
1754
|
+
try:
|
|
1755
|
+
await self._make_request("DELETE", f"attachment/{attachment_id}")
|
|
1756
|
+
return True
|
|
1757
|
+
except HTTPStatusError as e:
|
|
1758
|
+
if e.response.status_code == 404:
|
|
1759
|
+
logger.warning(f"Attachment {attachment_id} not found")
|
|
1760
|
+
return False
|
|
1761
|
+
logger.error(
|
|
1762
|
+
f"Failed to delete attachment {attachment_id}: {e.response.status_code} - {e.response.text}"
|
|
1763
|
+
)
|
|
1764
|
+
return False
|
|
1765
|
+
except Exception as e:
|
|
1766
|
+
logger.error(f"Unexpected error deleting attachment {attachment_id}: {e}")
|
|
1767
|
+
return False
|
|
1768
|
+
|
|
869
1769
|
async def close(self) -> None:
|
|
870
1770
|
"""Close the adapter and cleanup resources."""
|
|
871
1771
|
# Clear caches
|