mcp-ticketer 0.3.0__py3-none-any.whl → 2.2.9__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +930 -52
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1537 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +58 -16
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/__init__.py +1 -1
- mcp_ticketer/adapters/linear/adapter.py +3810 -462
- mcp_ticketer/adapters/linear/client.py +312 -69
- mcp_ticketer/adapters/linear/mappers.py +305 -85
- mcp_ticketer/adapters/linear/queries.py +317 -17
- mcp_ticketer/adapters/linear/types.py +187 -64
- mcp_ticketer/adapters/linear.py +2 -2
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +91 -54
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +1323 -151
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +209 -114
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +256 -130
- mcp_ticketer/cli/main.py +140 -1544
- mcp_ticketer/cli/mcp_configure.py +1013 -100
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +794 -0
- mcp_ticketer/cli/simple_health.py +84 -59
- mcp_ticketer/cli/ticket_commands.py +1375 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +195 -72
- mcp_ticketer/core/__init__.py +64 -1
- mcp_ticketer/core/adapter.py +618 -18
- mcp_ticketer/core/config.py +77 -68
- mcp_ticketer/core/env_discovery.py +75 -16
- mcp_ticketer/core/env_loader.py +121 -97
- mcp_ticketer/core/exceptions.py +32 -24
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +566 -19
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +189 -49
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +69 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +224 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +330 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1564 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +150 -0
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +318 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1413 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +78 -63
- mcp_ticketer/queue/queue.py +108 -21
- mcp_ticketer/queue/run_worker.py +2 -2
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +96 -58
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.9.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.9.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.9.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1354
- mcp_ticketer/adapters/jira.py +0 -1011
- mcp_ticketer/mcp/server.py +0 -2030
- mcp_ticketer-0.3.0.dist-info/METADATA +0 -414
- mcp_ticketer-0.3.0.dist-info/RECORD +0 -59
- mcp_ticketer-0.3.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/licenses/LICENSE +0 -0
mcp_ticketer/adapters/jira.py
DELETED
@@ -1,1011 +0,0 @@
-"""JIRA adapter implementation using REST API v3."""
-
-import asyncio
-import builtins
-import logging
-import os
-import re
-from datetime import datetime
-from enum import Enum
-from typing import Any, Dict, List, Optional, Union
-
-import httpx
-from httpx import AsyncClient, HTTPStatusError, TimeoutException
-
-from ..core.adapter import BaseAdapter
-from ..core.models import Comment, Epic, Priority, SearchQuery, Task, TicketState
-from ..core.registry import AdapterRegistry
-from ..core.env_loader import load_adapter_config, validate_adapter_config
-
-logger = logging.getLogger(__name__)
-
-
-def parse_jira_datetime(date_str: str) -> Optional[datetime]:
-    """
-    Parse JIRA datetime strings which can be in various formats.
-
-    JIRA can return dates in formats like:
-    - 2025-10-24T14:12:18.771-0400
-    - 2025-10-24T14:12:18.771Z
-    - 2025-10-24T14:12:18.771+00:00
-    """
-    if not date_str:
-        return None
-
-    try:
-        # Handle Z timezone
-        if date_str.endswith('Z'):
-            return datetime.fromisoformat(date_str.replace('Z', '+00:00'))
-
-        # Handle timezone formats like -0400, +0500 (need to add colon)
-        if re.match(r'.*[+-]\d{4}$', date_str):
-            # Insert colon in timezone: -0400 -> -04:00
-            date_str = re.sub(r'([+-]\d{2})(\d{2})$', r'\1:\2', date_str)
-
-        return datetime.fromisoformat(date_str)
-
-    except (ValueError, TypeError) as e:
-        logger.warning(f"Failed to parse JIRA datetime '{date_str}': {e}")
-        return None
-
-
-def extract_text_from_adf(adf_content: Union[str, Dict[str, Any]]) -> str:
-    """
-    Extract plain text from Atlassian Document Format (ADF).
-
-    Args:
-        adf_content: Either a string (already plain text) or ADF document dict
-
-    Returns:
-        Plain text string extracted from the ADF content
-    """
-    if isinstance(adf_content, str):
-        return adf_content
-
-    if not isinstance(adf_content, dict):
-        return str(adf_content) if adf_content else ""
-
-    def extract_text_recursive(node: Dict[str, Any]) -> str:
-        """Recursively extract text from ADF nodes."""
-        if not isinstance(node, dict):
-            return ""
-
-        # If this is a text node, return its text
-        if node.get("type") == "text":
-            return node.get("text", "")
-
-        # If this node has content, process it recursively
-        content = node.get("content", [])
-        if isinstance(content, list):
-            return "".join(extract_text_recursive(child) for child in content)
-
-        return ""
-
-    try:
-        return extract_text_recursive(adf_content)
-    except Exception as e:
-        logger.warning(f"Failed to extract text from ADF: {e}")
-        return str(adf_content) if adf_content else ""
-
-
-class JiraIssueType(str, Enum):
-    """Common JIRA issue types."""
-
-    EPIC = "Epic"
-    STORY = "Story"
-    TASK = "Task"
-    BUG = "Bug"
-    SUBTASK = "Sub-task"
-    IMPROVEMENT = "Improvement"
-    NEW_FEATURE = "New Feature"
-
-
-class JiraPriority(str, Enum):
-    """Standard JIRA priority levels."""
-
-    HIGHEST = "Highest"
-    HIGH = "High"
-    MEDIUM = "Medium"
-    LOW = "Low"
-    LOWEST = "Lowest"
-
-
-class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
-    """Adapter for JIRA using REST API v3."""
-
-    def __init__(self, config: dict[str, Any]):
-        """Initialize JIRA adapter.
-
-        Args:
-            config: Configuration with:
-                - server: JIRA server URL (e.g., https://company.atlassian.net)
-                - email: User email for authentication
-                - api_token: API token for authentication
-                - project_key: Default project key
-                - cloud: Whether this is JIRA Cloud (default: True)
-                - verify_ssl: Whether to verify SSL certificates (default: True)
-                - timeout: Request timeout in seconds (default: 30)
-                - max_retries: Maximum retry attempts (default: 3)
-
-        """
-        super().__init__(config)
-
-        # Load configuration with environment variable resolution
-        full_config = load_adapter_config("jira", config)
-
-        # Validate required configuration
-        missing_keys = validate_adapter_config("jira", full_config)
-        if missing_keys:
-            raise ValueError(f"JIRA adapter missing required configuration: {', '.join(missing_keys)}")
-
-        # Configuration
-        self.server = full_config.get("server", "")
-        self.email = full_config.get("email", "")
-        self.api_token = full_config.get("api_token", "")
-        self.project_key = full_config.get("project_key", "")
-        self.is_cloud = full_config.get("cloud", True)
-        self.verify_ssl = full_config.get("verify_ssl", True)
-        self.timeout = full_config.get("timeout", 30)
-        self.max_retries = full_config.get("max_retries", 3)
-
-        # Clean up server URL
-        self.server = self.server.rstrip("/")
-
-        # API base URL
-        self.api_base = (
-            f"{self.server}/rest/api/3"
-            if self.is_cloud
-            else f"{self.server}/rest/api/2"
-        )
-
-        # HTTP client setup
-        self.auth = httpx.BasicAuth(self.email, self.api_token)
-        self.headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        # Cache for workflow states and transitions
-        self._workflow_cache: dict[str, Any] = {}
-        self._priority_cache: list[dict[str, Any]] = []
-        self._issue_types_cache: dict[str, Any] = {}
-        self._custom_fields_cache: dict[str, Any] = {}
-
-    def validate_credentials(self) -> tuple[bool, str]:
-        """Validate that required credentials are present.
-
-        Returns:
-            (is_valid, error_message) - Tuple of validation result and error message
-
-        """
-        if not self.server:
-            return (
-                False,
-                "JIRA_SERVER is required but not found. Set it in .env.local or environment.",
-            )
-        if not self.email:
-            return (
-                False,
-                "JIRA_EMAIL is required but not found. Set it in .env.local or environment.",
-            )
-        if not self.api_token:
-            return (
-                False,
-                "JIRA_API_TOKEN is required but not found. Set it in .env.local or environment.",
-            )
-        return True, ""
-
-    def _get_state_mapping(self) -> dict[TicketState, str]:
-        """Map universal states to common JIRA workflow states."""
-        return {
-            TicketState.OPEN: "To Do",
-            TicketState.IN_PROGRESS: "In Progress",
-            TicketState.READY: "In Review",
-            TicketState.TESTED: "Testing",
-            TicketState.DONE: "Done",
-            TicketState.WAITING: "Waiting",
-            TicketState.BLOCKED: "Blocked",
-            TicketState.CLOSED: "Closed",
-        }
-
-    async def _get_client(self) -> AsyncClient:
-        """Get configured async HTTP client."""
-        return AsyncClient(
-            auth=self.auth,
-            headers=self.headers,
-            timeout=self.timeout,
-            verify=self.verify_ssl,
-        )
-
-    async def _make_request(
-        self,
-        method: str,
-        endpoint: str,
-        data: Optional[dict[str, Any]] = None,
-        params: Optional[dict[str, Any]] = None,
-        retry_count: int = 0,
-    ) -> dict[str, Any]:
-        """Make HTTP request to JIRA API with retry logic.
-
-        Args:
-            method: HTTP method
-            endpoint: API endpoint
-            data: Request body data
-            params: Query parameters
-            retry_count: Current retry attempt
-
-        Returns:
-            Response data
-
-        Raises:
-            HTTPStatusError: On API errors
-            TimeoutException: On timeout
-
-        """
-        url = f"{self.api_base}/{endpoint.lstrip('/')}"
-
-        async with await self._get_client() as client:
-            try:
-                response = await client.request(
-                    method=method, url=url, json=data, params=params
-                )
-                response.raise_for_status()
-
-                # Handle empty responses
-                if response.status_code == 204:
-                    return {}
-
-                return response.json()
-
-            except TimeoutException as e:
-                if retry_count < self.max_retries:
-                    await asyncio.sleep(2**retry_count)  # Exponential backoff
-                    return await self._make_request(
-                        method, endpoint, data, params, retry_count + 1
-                    )
-                raise e
-
-            except HTTPStatusError as e:
-                # Handle rate limiting
-                if e.response.status_code == 429 and retry_count < self.max_retries:
-                    retry_after = int(e.response.headers.get("Retry-After", 5))
-                    await asyncio.sleep(retry_after)
-                    return await self._make_request(
-                        method, endpoint, data, params, retry_count + 1
-                    )
-
-                # Log error details
-                logger.error(
-                    f"JIRA API error: {e.response.status_code} - {e.response.text}"
-                )
-                raise e
-
-    async def _get_priorities(self) -> list[dict[str, Any]]:
-        """Get available priorities from JIRA."""
-        if not self._priority_cache:
-            self._priority_cache = await self._make_request("GET", "priority")
-        return self._priority_cache
-
-    async def _get_issue_types(
-        self, project_key: Optional[str] = None
-    ) -> list[dict[str, Any]]:
-        """Get available issue types for a project."""
-        key = project_key or self.project_key
-        if key not in self._issue_types_cache:
-            data = await self._make_request("GET", f"project/{key}")
-            self._issue_types_cache[key] = data.get("issueTypes", [])
-        return self._issue_types_cache[key]
-
-    async def _get_transitions(self, issue_key: str) -> list[dict[str, Any]]:
-        """Get available transitions for an issue."""
-        data = await self._make_request("GET", f"issue/{issue_key}/transitions")
-        return data.get("transitions", [])
-
-    async def _get_custom_fields(self) -> dict[str, str]:
-        """Get custom field definitions."""
-        if not self._custom_fields_cache:
-            fields = await self._make_request("GET", "field")
-            self._custom_fields_cache = {
-                field["name"]: field["id"]
-                for field in fields
-                if field.get("custom", False)
-            }
-        return self._custom_fields_cache
-
-    def _convert_from_adf(self, adf_content: Any) -> str:
-        """Convert Atlassian Document Format (ADF) to plain text.
-
-        This extracts text content from ADF structure for display.
-        """
-        if not adf_content:
-            return ""
-
-        # If it's already a string, return it (JIRA Server)
-        if isinstance(adf_content, str):
-            return adf_content
-
-        # Handle ADF structure
-        if not isinstance(adf_content, dict):
-            return str(adf_content)
-
-        content_nodes = adf_content.get("content", [])
-        lines = []
-
-        for node in content_nodes:
-            if node.get("type") == "paragraph":
-                paragraph_text = ""
-                for content_item in node.get("content", []):
-                    if content_item.get("type") == "text":
-                        paragraph_text += content_item.get("text", "")
-                lines.append(paragraph_text)
-            elif node.get("type") == "heading":
-                heading_text = ""
-                for content_item in node.get("content", []):
-                    if content_item.get("type") == "text":
-                        heading_text += content_item.get("text", "")
-                lines.append(heading_text)
-
-        return "\n".join(lines)
-
-    def _convert_to_adf(self, text: str) -> dict[str, Any]:
-        """Convert plain text to Atlassian Document Format (ADF).
-
-        ADF is required for JIRA Cloud description fields.
-        This creates a simple document with paragraphs for each line.
-        """
-        if not text:
-            return {"type": "doc", "version": 1, "content": []}
-
-        # Split text into lines and create paragraphs
-        lines = text.split("\n")
-        content = []
-
-        for line in lines:
-            if line.strip():  # Non-empty line
-                content.append(
-                    {"type": "paragraph", "content": [{"type": "text", "text": line}]}
-                )
-            else:  # Empty line becomes empty paragraph
-                content.append({"type": "paragraph", "content": []})
-
-        return {"type": "doc", "version": 1, "content": content}
-
-    def _map_priority_to_jira(self, priority: Priority) -> str:
-        """Map universal priority to JIRA priority."""
-        mapping = {
-            Priority.CRITICAL: JiraPriority.HIGHEST,
-            Priority.HIGH: JiraPriority.HIGH,
-            Priority.MEDIUM: JiraPriority.MEDIUM,
-            Priority.LOW: JiraPriority.LOW,
-        }
-        return mapping.get(priority, JiraPriority.MEDIUM)
-
-    def _map_priority_from_jira(
-        self, jira_priority: Optional[dict[str, Any]]
-    ) -> Priority:
-        """Map JIRA priority to universal priority."""
-        if not jira_priority:
-            return Priority.MEDIUM
-
-        name = jira_priority.get("name", "").lower()
-
-        if "highest" in name or "urgent" in name or "critical" in name:
-            return Priority.CRITICAL
-        elif "high" in name:
-            return Priority.HIGH
-        elif "low" in name:
-            return Priority.LOW
-        else:
-            return Priority.MEDIUM
-
-    def _map_state_from_jira(self, status: dict[str, Any]) -> TicketState:
-        """Map JIRA status to universal state."""
-        if not status:
-            return TicketState.OPEN
-
-        name = status.get("name", "").lower()
-        category = status.get("statusCategory", {}).get("key", "").lower()
-
-        # Try to match by category first (more reliable)
-        if category == "new":
-            return TicketState.OPEN
-        elif category == "indeterminate":
-            return TicketState.IN_PROGRESS
-        elif category == "done":
-            return TicketState.DONE
-
-        # Fall back to name matching
-        if "block" in name:
-            return TicketState.BLOCKED
-        elif "wait" in name:
-            return TicketState.WAITING
-        elif "progress" in name or "doing" in name:
-            return TicketState.IN_PROGRESS
-        elif "review" in name:
-            return TicketState.READY
-        elif "test" in name:
-            return TicketState.TESTED
-        elif "done" in name or "resolved" in name:
-            return TicketState.DONE
-        elif "closed" in name:
-            return TicketState.CLOSED
-        else:
-            return TicketState.OPEN
-
-    def _issue_to_ticket(self, issue: dict[str, Any]) -> Union[Epic, Task]:
-        """Convert JIRA issue to universal ticket model."""
-        fields = issue.get("fields", {})
-
-        # Determine ticket type
-        issue_type = fields.get("issuetype", {}).get("name", "").lower()
-        is_epic = "epic" in issue_type
-
-        # Extract common fields
-        # Convert ADF description back to plain text if needed
-        description = self._convert_from_adf(fields.get("description", ""))
-
-        base_data = {
-            "id": issue.get("key"),
-            "title": fields.get("summary", ""),
-            "description": description,
-            "state": self._map_state_from_jira(fields.get("status", {})),
-            "priority": self._map_priority_from_jira(fields.get("priority")),
-            "tags": [
-                label.get("name", "") if isinstance(label, dict) else str(label)
-                for label in fields.get("labels", [])
-            ],
-            "created_at": parse_jira_datetime(fields.get("created")),
-            "updated_at": parse_jira_datetime(fields.get("updated")),
-            "metadata": {
-                "jira": {
-                    "id": issue.get("id"),
-                    "key": issue.get("key"),
-                    "self": issue.get("self"),
-                    "url": f"{self.server}/browse/{issue.get('key')}",
-                    "issue_type": fields.get("issuetype", {}),
-                    "project": fields.get("project", {}),
-                    "components": fields.get("components", []),
-                    "fix_versions": fields.get("fixVersions", []),
-                    "resolution": fields.get("resolution"),
-                }
-            },
-        }
-
-        if is_epic:
-            # Create Epic
-            return Epic(
-                **base_data,
-                child_issues=[
-                    subtask.get("key") for subtask in fields.get("subtasks", [])
-                ],
-            )
-        else:
-            # Create Task
-            parent = fields.get("parent", {})
-            epic_link = fields.get("customfield_10014")  # Common epic link field
-
-            return Task(
-                **base_data,
-                parent_issue=parent.get("key") if parent else None,
-                parent_epic=epic_link if epic_link else None,
-                assignee=(
-                    fields.get("assignee", {}).get("displayName")
-                    if fields.get("assignee")
-                    else None
-                ),
-                estimated_hours=(
-                    fields.get("timetracking", {}).get("originalEstimateSeconds", 0)
-                    / 3600
-                    if fields.get("timetracking")
-                    else None
-                ),
-                actual_hours=(
-                    fields.get("timetracking", {}).get("timeSpentSeconds", 0) / 3600
-                    if fields.get("timetracking")
-                    else None
-                ),
-            )
-
-    def _ticket_to_issue_fields(
-        self, ticket: Union[Epic, Task], issue_type: Optional[str] = None
-    ) -> dict[str, Any]:
-        """Convert universal ticket to JIRA issue fields."""
-        # Convert description to ADF format for JIRA Cloud
-        description = (
-            self._convert_to_adf(ticket.description or "")
-            if self.is_cloud
-            else (ticket.description or "")
-        )
-
-        fields = {
-            "summary": ticket.title,
-            "description": description,
-            "labels": ticket.tags,
-        }
-
-        # Only add priority for Tasks, not Epics (some JIRA configurations don't allow priority on Epics)
-        if isinstance(ticket, Task):
-            fields["priority"] = {"name": self._map_priority_to_jira(ticket.priority)}
-
-        # Add project if creating new issue
-        if not ticket.id and self.project_key:
-            fields["project"] = {"key": self.project_key}
-
-        # Set issue type
-        if issue_type:
-            fields["issuetype"] = {"name": issue_type}
-        elif isinstance(ticket, Epic):
-            fields["issuetype"] = {"name": JiraIssueType.EPIC}
-        else:
-            fields["issuetype"] = {"name": JiraIssueType.TASK}
-
-        # Add task-specific fields
-        if isinstance(ticket, Task):
-            if ticket.assignee:
-                # Note: Need to resolve user account ID
-                fields["assignee"] = {"accountId": ticket.assignee}
-
-            if ticket.parent_issue:
-                fields["parent"] = {"key": ticket.parent_issue}
-
-            # Time tracking
-            if ticket.estimated_hours:
-                fields["timetracking"] = {
-                    "originalEstimate": f"{int(ticket.estimated_hours)}h"
-                }
-
-        return fields
-
-    async def create(self, ticket: Union[Epic, Task]) -> Union[Epic, Task]:
-        """Create a new JIRA issue."""
-        # Validate credentials before attempting operation
-        is_valid, error_message = self.validate_credentials()
-        if not is_valid:
-            raise ValueError(error_message)
-
-        # Prepare issue fields
-        fields = self._ticket_to_issue_fields(ticket)
-
-        # Create issue
-        data = await self._make_request("POST", "issue", data={"fields": fields})
-
-        # Set the ID and fetch full issue data
-        ticket.id = data.get("key")
-
-        # Fetch complete issue data
-        created_issue = await self._make_request("GET", f"issue/{ticket.id}")
-        return self._issue_to_ticket(created_issue)
-
-    async def read(self, ticket_id: str) -> Optional[Union[Epic, Task]]:
-        """Read a JIRA issue by key."""
-        # Validate credentials before attempting operation
-        is_valid, error_message = self.validate_credentials()
-        if not is_valid:
-            raise ValueError(error_message)
-
-        try:
-            issue = await self._make_request(
-                "GET", f"issue/{ticket_id}", params={"expand": "renderedFields"}
-            )
-            return self._issue_to_ticket(issue)
-        except HTTPStatusError as e:
-            if e.response.status_code == 404:
-                return None
-            raise
-
-    async def update(
-        self, ticket_id: str, updates: dict[str, Any]
-    ) -> Optional[Union[Epic, Task]]:
-        """Update a JIRA issue."""
-        # Validate credentials before attempting operation
-        is_valid, error_message = self.validate_credentials()
-        if not is_valid:
-            raise ValueError(error_message)
-
-        # Read current issue
-        current = await self.read(ticket_id)
-        if not current:
-            return None
-
-        # Prepare update fields
-        fields = {}
-
-        if "title" in updates:
-            fields["summary"] = updates["title"]
-        if "description" in updates:
-            fields["description"] = updates["description"]
-        if "priority" in updates:
-            fields["priority"] = {
-                "name": self._map_priority_to_jira(updates["priority"])
-            }
-        if "tags" in updates:
-            fields["labels"] = updates["tags"]
-        if "assignee" in updates:
-            fields["assignee"] = {"accountId": updates["assignee"]}
-
-        # Apply update
-        if fields:
-            await self._make_request(
-                "PUT", f"issue/{ticket_id}", data={"fields": fields}
-            )
-
-        # Handle state transitions separately
-        if "state" in updates:
-            await self.transition_state(ticket_id, updates["state"])
-
-        # Return updated issue
-        return await self.read(ticket_id)
-
-    async def delete(self, ticket_id: str) -> bool:
-        """Delete a JIRA issue."""
-        # Validate credentials before attempting operation
-        is_valid, error_message = self.validate_credentials()
-        if not is_valid:
-            raise ValueError(error_message)
-
-        try:
-            await self._make_request("DELETE", f"issue/{ticket_id}")
-            return True
-        except HTTPStatusError as e:
-            if e.response.status_code == 404:
-                return False
-            raise
-
-    async def list(
-        self, limit: int = 10, offset: int = 0, filters: Optional[dict[str, Any]] = None
-    ) -> list[Union[Epic, Task]]:
-        """List JIRA issues with pagination."""
-        # Build JQL query
-        jql_parts = []
-
-        if self.project_key:
-            jql_parts.append(f"project = {self.project_key}")
-
-        if filters:
-            if "state" in filters:
-                status = self.map_state_to_system(filters["state"])
-                jql_parts.append(f'status = "{status}"')
-            if "priority" in filters:
-                priority = self._map_priority_to_jira(filters["priority"])
-                jql_parts.append(f'priority = "{priority}"')
-            if "assignee" in filters:
-                jql_parts.append(f'assignee = "{filters["assignee"]}"')
-            if "ticket_type" in filters:
-                jql_parts.append(f'issuetype = "{filters["ticket_type"]}"')
-
-        jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
-
-        # Search issues using the JIRA API endpoint
-        data = await self._make_request(
-            "GET",
-            "search/jql",  # JIRA search endpoint (new API v3)
-            params={
-                "jql": jql,
-                "startAt": offset,
-                "maxResults": limit,
-                "fields": "*all",
-                "expand": "renderedFields",
-            },
-        )
-
-        # Convert issues
-        issues = data.get("issues", [])
-        return [self._issue_to_ticket(issue) for issue in issues]
-
-    async def search(self, query: SearchQuery) -> builtins.list[Union[Epic, Task]]:
-        """Search JIRA issues using JQL."""
-        # Build JQL query
-        jql_parts = []
-
-        if self.project_key:
-            jql_parts.append(f"project = {self.project_key}")
-
-        # Text search
-        if query.query:
-            jql_parts.append(f'text ~ "{query.query}"')
-
-        # State filter
-        if query.state:
-            status = self.map_state_to_system(query.state)
-            jql_parts.append(f'status = "{status}"')
-
-        # Priority filter
-        if query.priority:
-            priority = self._map_priority_to_jira(query.priority)
-            jql_parts.append(f'priority = "{priority}"')
-
-        # Assignee filter
-        if query.assignee:
-            jql_parts.append(f'assignee = "{query.assignee}"')
-
-        # Tags/labels filter
-        if query.tags:
-            label_conditions = [f'labels = "{tag}"' for tag in query.tags]
-            jql_parts.append(f"({' OR '.join(label_conditions)})")
-
-        jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
-
-        # Execute search using the JIRA API endpoint
-        data = await self._make_request(
-            "GET",
-            "search/jql",  # JIRA search endpoint (new API v3)
-            params={
-                "jql": jql,
-                "startAt": query.offset,
-                "maxResults": query.limit,
-                "fields": "*all",
-                "expand": "renderedFields",
-            },
-        )
-
-        # Convert and return results
-        issues = data.get("issues", [])
-        return [self._issue_to_ticket(issue) for issue in issues]
-
-    async def transition_state(
-        self, ticket_id: str, target_state: TicketState
-    ) -> Optional[Union[Epic, Task]]:
-        """Transition JIRA issue to a new state."""
-        # Get available transitions
-        transitions = await self._get_transitions(ticket_id)
-
-        # Find matching transition
-        target_name = self.map_state_to_system(target_state).lower()
-        transition = None
-
-        for trans in transitions:
-            trans_name = trans.get("to", {}).get("name", "").lower()
-            if target_name in trans_name or trans_name in target_name:
-                transition = trans
-                break
-
-        if not transition:
-            # Try to find by status category
-            for trans in transitions:
-                category = (
-                    trans.get("to", {}).get("statusCategory", {}).get("key", "").lower()
-                )
-                if (
-                    (target_state == TicketState.DONE and category == "done")
-                    or (
-                        target_state == TicketState.IN_PROGRESS
-                        and category == "indeterminate"
-                    )
-                    or (target_state == TicketState.OPEN and category == "new")
-                ):
-                    transition = trans
-                    break
-
-        if not transition:
-            logger.warning(
-                f"No transition found to move {ticket_id} to {target_state}. "
-                f"Available transitions: {[t.get('name') for t in transitions]}"
-            )
-            return None
-
-        # Execute transition
-        await self._make_request(
-            "POST",
-            f"issue/{ticket_id}/transitions",
-            data={"transition": {"id": transition["id"]}},
-        )
-
-        # Return updated issue
-        return await self.read(ticket_id)
-
-    async def add_comment(self, comment: Comment) -> Comment:
-        """Add a comment to a JIRA issue."""
-        # Prepare comment data in Atlassian Document Format
-        data = {
-            "body": {
-                "type": "doc",
-                "version": 1,
-                "content": [
-                    {
-                        "type": "paragraph",
-                        "content": [
-                            {
-                                "type": "text",
-                                "text": comment.content
-                            }
-                        ]
-                    }
-                ]
-            }
-        }
-
-        # Add comment
-        result = await self._make_request(
-            "POST", f"issue/{comment.ticket_id}/comment", data=data
-        )
-
-        # Update comment with JIRA data
-        comment.id = result.get("id")
-        comment.created_at = parse_jira_datetime(result.get("created")) or datetime.now()
-        comment.author = result.get("author", {}).get("displayName", comment.author)
-        comment.metadata["jira"] = result
-
-        return comment
-
-    async def get_comments(
-        self, ticket_id: str, limit: int = 10, offset: int = 0
-    ) -> builtins.list[Comment]:
-        """Get comments for a JIRA issue."""
-        # Fetch issue with comments
-        params = {"expand": "comments", "fields": "comment"}
-
-        issue = await self._make_request("GET", f"issue/{ticket_id}", params=params)
-
-        # Extract comments
-        comments_data = issue.get("fields", {}).get("comment", {}).get("comments", [])
-
-        # Apply pagination
-        paginated = comments_data[offset : offset + limit]
-
-        # Convert to Comment objects
-        comments = []
-        for comment_data in paginated:
-            # Extract text content from ADF format
-            body_content = comment_data.get("body", "")
-            text_content = extract_text_from_adf(body_content)
-
-            comment = Comment(
-                id=comment_data.get("id"),
-                ticket_id=ticket_id,
-                author=comment_data.get("author", {}).get("displayName", "Unknown"),
-                content=text_content,
-                created_at=parse_jira_datetime(comment_data.get("created")),
-                metadata={"jira": comment_data},
-            )
-            comments.append(comment)
-
-        return comments
-
-    async def get_project_info(
-        self, project_key: Optional[str] = None
-    ) -> dict[str, Any]:
-        """Get JIRA project information including workflows and fields."""
-        key = project_key or self.project_key
-        if not key:
-            raise ValueError("Project key is required")
-
-        project = await self._make_request("GET", f"project/{key}")
-
-        # Get additional project details
-        issue_types = await self._get_issue_types(key)
-        priorities = await self._get_priorities()
-        custom_fields = await self._get_custom_fields()
-
-        return {
-            "project": project,
-            "issue_types": issue_types,
-            "priorities": priorities,
-            "custom_fields": custom_fields,
-        }
-
-    async def execute_jql(
-        self, jql: str, limit: int = 50
-    ) -> builtins.list[Union[Epic, Task]]:
-        """Execute a raw JQL query.
-
-        Args:
-            jql: JIRA Query Language string
-            limit: Maximum number of results
-
-        Returns:
-            List of matching tickets
-
-        """
-        data = await self._make_request(
-            "POST",
-            "search",
-            data={
-                "jql": jql,
-                "startAt": 0,
-                "maxResults": limit,
-                "fields": ["*all"],
-            },
-        )
-
-        issues = data.get("issues", [])
-        return [self._issue_to_ticket(issue) for issue in issues]
-
-    async def get_sprints(
-        self, board_id: Optional[int] = None
-    ) -> builtins.list[dict[str, Any]]:
-        """Get active sprints for a board (requires JIRA Software).
-
-        Args:
-            board_id: Agile board ID
-
-        Returns:
-            List of sprint information
-
-        """
-        if not board_id:
-            # Try to find a board for the project
-            boards_data = await self._make_request(
-                "GET",
-                "/rest/agile/1.0/board",
-                params={"projectKeyOrId": self.project_key},
-            )
-            boards = boards_data.get("values", [])
-            if not boards:
-                return []
-            board_id = boards[0]["id"]
-
-        # Get sprints for the board
-        sprints_data = await self._make_request(
-            "GET",
-            f"/rest/agile/1.0/board/{board_id}/sprint",
-            params={"state": "active,future"},
-        )
-
-        return sprints_data.get("values", [])
-
-    async def get_project_users(self) -> List[Dict[str, Any]]:
-        """Get users who have access to the project."""
-        if not self.project_key:
-            return []
-
-        try:
-            # Get project role users
-            project_data = await self._make_request("GET", f"project/{self.project_key}")
-
-            # Get users from project roles
-            users = []
-            if "roles" in project_data:
-                for role_name, role_url in project_data["roles"].items():
-                    # Extract role ID from URL
-                    role_id = role_url.split("/")[-1]
-                    try:
-                        role_data = await self._make_request("GET", f"project/{self.project_key}/role/{role_id}")
-                        if "actors" in role_data:
-                            for actor in role_data["actors"]:
-                                if actor.get("type") == "atlassian-user-role-actor":
-                                    users.append(actor.get("actorUser", {}))
-                    except Exception:
-                        # Skip if role access fails
-                        continue
-
-            # Remove duplicates based on accountId
-            seen_ids = set()
-            unique_users = []
-            for user in users:
-                account_id = user.get("accountId")
-                if account_id and account_id not in seen_ids:
-                    seen_ids.add(account_id)
-                    unique_users.append(user)
-
-            return unique_users
-
-        except Exception:
-            # Fallback: try to get assignable users for the project
-            try:
-                users_data = await self._make_request(
-                    "GET",
-                    "user/assignable/search",
-                    params={"project": self.project_key, "maxResults": 50}
-                )
-                return users_data if isinstance(users_data, list) else []
-            except Exception:
-                return []
-
-    async def get_current_user(self) -> Optional[Dict[str, Any]]:
-        """Get current authenticated user information."""
-        try:
-            return await self._make_request("GET", "myself")
-        except Exception:
-            return None
-
-    async def close(self) -> None:
-        """Close the adapter and cleanup resources."""
-        # Clear caches
-        self._workflow_cache.clear()
-        self._priority_cache.clear()
-        self._issue_types_cache.clear()
-        self._custom_fields_cache.clear()
-
-
-# Register the adapter
-AdapterRegistry.register("jira", JiraAdapter)