mcp-ticketer 0.4.11__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +394 -9
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1416 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github.py +836 -105
- mcp_ticketer/adapters/hybrid.py +47 -5
- mcp_ticketer/adapters/jira.py +772 -1
- mcp_ticketer/adapters/linear/adapter.py +2293 -108
- mcp_ticketer/adapters/linear/client.py +146 -12
- mcp_ticketer/adapters/linear/mappers.py +105 -11
- mcp_ticketer/adapters/linear/queries.py +168 -1
- mcp_ticketer/adapters/linear/types.py +80 -4
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +3 -3
- mcp_ticketer/cli/adapter_diagnostics.py +4 -2
- mcp_ticketer/cli/auggie_configure.py +18 -6
- mcp_ticketer/cli/codex_configure.py +175 -60
- mcp_ticketer/cli/configure.py +884 -146
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +31 -28
- mcp_ticketer/cli/discover.py +293 -21
- mcp_ticketer/cli/gemini_configure.py +18 -6
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +99 -15
- mcp_ticketer/cli/main.py +109 -2055
- mcp_ticketer/cli/mcp_configure.py +673 -99
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +6 -6
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +536 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +639 -0
- mcp_ticketer/cli/simple_health.py +13 -11
- mcp_ticketer/cli/ticket_commands.py +277 -36
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +45 -41
- mcp_ticketer/core/__init__.py +35 -1
- mcp_ticketer/core/adapter.py +170 -5
- mcp_ticketer/core/config.py +38 -31
- mcp_ticketer/core/env_discovery.py +33 -3
- mcp_ticketer/core/env_loader.py +7 -6
- mcp_ticketer/core/exceptions.py +10 -4
- mcp_ticketer/core/http_client.py +10 -10
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +32 -20
- mcp_ticketer/core/models.py +136 -1
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +148 -14
- mcp_ticketer/core/registry.py +1 -1
- mcp_ticketer/core/session_state.py +171 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +2 -2
- mcp_ticketer/mcp/server/__init__.py +2 -2
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/main.py +187 -93
- mcp_ticketer/mcp/server/routing.py +655 -0
- mcp_ticketer/mcp/server/server_sdk.py +58 -0
- mcp_ticketer/mcp/server/tools/__init__.py +37 -9
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +65 -20
- mcp_ticketer/mcp/server/tools/bulk_tools.py +259 -202
- mcp_ticketer/mcp/server/tools/comment_tools.py +74 -12
- mcp_ticketer/mcp/server/tools/config_tools.py +1429 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +878 -319
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +3 -7
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +180 -97
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1182 -82
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/health_monitor.py +1 -0
- mcp_ticketer/queue/manager.py +4 -4
- mcp_ticketer/queue/queue.py +3 -3
- mcp_ticketer/queue/run_worker.py +1 -1
- mcp_ticketer/queue/ticket_registry.py +2 -2
- mcp_ticketer/queue/worker.py +15 -13
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.0.1.dist-info/METADATA +1366 -0
- mcp_ticketer-2.0.1.dist-info/RECORD +122 -0
- mcp_ticketer-0.4.11.dist-info/METADATA +0 -496
- mcp_ticketer-0.4.11.dist-info/RECORD +0 -77
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/top_level.txt +0 -0
mcp_ticketer/adapters/asana/client.py
@@ -0,0 +1,292 @@
+"""Asana HTTP client for REST API v1.0."""
+
+import asyncio
+import logging
+from typing import Any
+
+import httpx
+
+logger = logging.getLogger(__name__)
+
+
+class AsanaClient:
+    """HTTP client for Asana REST API v1.0.
+
+    Handles:
+    - Bearer token authentication
+    - Rate limiting (429 with Retry-After)
+    - Pagination (offset tokens)
+    - Error handling for all status codes
+    - Request/Response wrapping ({"data": {...}})
+    """
+
+    BASE_URL = "https://app.asana.com/api/1.0"
+
+    def __init__(self, api_key: str, timeout: int = 30, max_retries: int = 3):
+        """Initialize Asana client.
+
+        Args:
+            api_key: Asana Personal Access Token (PAT)
+            timeout: Request timeout in seconds
+            max_retries: Maximum retry attempts for rate limiting
+
+        """
+        self.api_key = api_key
+        self.timeout = timeout
+        self.max_retries = max_retries
+
+        # Setup headers
+        self.headers = {
+            "Authorization": f"Bearer {api_key}",
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        # HTTP client (will be created on first use)
+        self._client: httpx.AsyncClient | None = None
+
+    async def _get_client(self) -> httpx.AsyncClient:
+        """Get or create async HTTP client.
+
+        Returns:
+            Configured async HTTP client
+
+        """
+        if self._client is None:
+            self._client = httpx.AsyncClient(
+                headers=self.headers,
+                timeout=self.timeout,
+                follow_redirects=True,
+            )
+        return self._client
+
+    async def _handle_rate_limit(self, response: httpx.Response, attempt: int) -> None:
+        """Handle rate limiting with exponential backoff.
+
+        Args:
+            response: HTTP response with 429 status
+            attempt: Current retry attempt number
+
+        Raises:
+            ValueError: If max retries exceeded
+
+        """
+        if attempt >= self.max_retries:
+            raise ValueError(
+                f"Max retries ({self.max_retries}) exceeded for rate limiting"
+            )
+
+        # Get retry-after header (in seconds)
+        retry_after = int(response.headers.get("Retry-After", 60))
+        logger.warning(
+            f"Rate limited (429). Waiting {retry_after}s before retry {attempt + 1}/{self.max_retries}"
+        )
+        await asyncio.sleep(retry_after)
+
+    async def _request(
+        self,
+        method: str,
+        endpoint: str,
+        params: dict[str, Any] | None = None,
+        json: dict[str, Any] | None = None,
+    ) -> dict[str, Any]:
+        """Make HTTP request with retry logic for rate limiting.
+
+        Args:
+            method: HTTP method (GET, POST, PUT, DELETE)
+            endpoint: API endpoint (without base URL)
+            params: URL query parameters
+            json: Request body JSON data
+
+        Returns:
+            Response data (unwrapped from {"data": {...}})
+
+        Raises:
+            ValueError: If request fails or max retries exceeded
+
+        """
+        client = await self._get_client()
+        url = f"{self.BASE_URL}/{endpoint.lstrip('/')}"
+
+        # Wrap request body in {"data": {...}} for POST/PUT
+        if json is not None and method in ("POST", "PUT"):
+            json = {"data": json}
+
+        for attempt in range(self.max_retries + 1):
+            try:
+                response = await client.request(
+                    method=method,
+                    url=url,
+                    params=params,
+                    json=json,
+                )
+
+                # Handle rate limiting
+                if response.status_code == 429:
+                    await self._handle_rate_limit(response, attempt)
+                    continue
+
+                # Handle errors
+                if response.status_code >= 400:
+                    error_detail = response.text
+                    try:
+                        error_json = response.json()
+                        error_detail = error_json.get("errors", [{}])[0].get(
+                            "message", error_detail
+                        )
+                    except Exception:
+                        pass
+
+                    raise ValueError(
+                        f"Asana API error ({response.status_code}): {error_detail}"
+                    )
+
+                # Success - unwrap response
+                response_data = response.json()
+
+                # Asana wraps responses in {"data": {...}}
+                if isinstance(response_data, dict) and "data" in response_data:
+                    return response_data["data"]
+
+                return response_data
+
+            except httpx.TimeoutException as e:
+                logger.error(f"Request timeout for {method} {url}: {e}")
+                if attempt < self.max_retries:
+                    wait_time = 2**attempt  # Exponential backoff
+                    logger.info(f"Retrying in {wait_time}s...")
+                    await asyncio.sleep(wait_time)
+                else:
+                    raise ValueError(
+                        f"Request timeout after {self.max_retries} retries"
+                    ) from e
+
+            except httpx.HTTPError as e:
+                logger.error(f"HTTP error for {method} {url}: {e}")
+                raise ValueError(f"HTTP error: {e}") from e
+
+        raise ValueError("Request failed after all retry attempts")
+
+    async def get(
+        self, endpoint: str, params: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
+        """Make GET request.
+
+        Args:
+            endpoint: API endpoint
+            params: Query parameters
+
+        Returns:
+            Response data
+
+        """
+        return await self._request("GET", endpoint, params=params)
+
+    async def post(self, endpoint: str, data: dict[str, Any]) -> dict[str, Any]:
+        """Make POST request.
+
+        Args:
+            endpoint: API endpoint
+            data: Request body data
+
+        Returns:
+            Response data
+
+        """
+        return await self._request("POST", endpoint, json=data)
+
+    async def put(self, endpoint: str, data: dict[str, Any]) -> dict[str, Any]:
+        """Make PUT request.
+
+        Args:
+            endpoint: API endpoint
+            data: Request body data
+
+        Returns:
+            Response data
+
+        """
+        return await self._request("PUT", endpoint, json=data)
+
+    async def delete(self, endpoint: str) -> dict[str, Any]:
+        """Make DELETE request.
+
+        Args:
+            endpoint: API endpoint
+
+        Returns:
+            Response data
+
+        """
+        return await self._request("DELETE", endpoint)
+
+    async def get_paginated(
+        self,
+        endpoint: str,
+        params: dict[str, Any] | None = None,
+        limit: int = 100,
+    ) -> list[dict[str, Any]]:
+        """Get all pages of results using offset-based pagination.
+
+        Args:
+            endpoint: API endpoint
+            params: Query parameters
+            limit: Items per page (max 100)
+
+        Returns:
+            List of all results from all pages
+
+        """
+        if params is None:
+            params = {}
+
+        all_results = []
+        offset = None
+
+        while True:
+            # Set pagination params
+            page_params = params.copy()
+            page_params["limit"] = min(limit, 100)  # Max 100 per page
+            if offset:
+                page_params["offset"] = offset
+
+            # Get page
+            response = await self.get(endpoint, params=page_params)
+
+            # Handle both array and object responses
+            if isinstance(response, list):
+                results = response
+                next_page = None
+            else:
+                results = response.get("data", [])
+                next_page = response.get("next_page")
+
+            all_results.extend(results)
+
+            # Check if more pages
+            if not next_page or not next_page.get("offset"):
+                break
+
+            offset = next_page["offset"]
+
+        return all_results
+
+    async def test_connection(self) -> bool:
+        """Test API connection and credentials.
+
+        Returns:
+            True if connection successful
+
+        """
+        try:
+            await self.get("/users/me")
+            return True
+        except Exception as e:
+            logger.error(f"Connection test failed: {e}")
+            return False
+
+    async def close(self) -> None:
+        """Close HTTP client and cleanup resources."""
+        if self._client:
+            await self._client.aclose()
+            self._client = None
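The new AsanaClient wraps bearer authentication, 429 retry handling, response unwrapping, and offset pagination behind a small async API. For orientation, a minimal usage sketch follows; the token, project GID, and /tasks query parameters are placeholders chosen for illustration, not values taken from this release.

import asyncio

from mcp_ticketer.adapters.asana.client import AsanaClient


async def main() -> None:
    # "ASANA_PAT" is a placeholder Personal Access Token.
    client = AsanaClient(api_key="ASANA_PAT")
    try:
        # test_connection() issues GET /users/me and returns True/False.
        if await client.test_connection():
            # get_paginated() follows Asana's offset tokens until next_page is exhausted.
            tasks = await client.get_paginated(
                "tasks",
                params={"project": "1200000000000001", "opt_fields": "name,completed"},
            )
            print(f"Fetched {len(tasks)} tasks")
    finally:
        await client.close()


asyncio.run(main())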
mcp_ticketer/adapters/asana/mappers.py
@@ -0,0 +1,348 @@
+"""Data mappers for converting between Asana and mcp-ticketer models."""
+
+import logging
+from datetime import datetime
+from typing import Any
+
+from ...core.models import (
+    Attachment,
+    Comment,
+    Epic,
+    Priority,
+    Task,
+    TicketState,
+    TicketType,
+)
+from .types import map_priority_from_asana, map_state_from_asana, map_state_to_asana
+
+logger = logging.getLogger(__name__)
+
+
+def parse_asana_datetime(date_str: str | None) -> datetime | None:
+    """Parse Asana datetime string to datetime object.
+
+    Args:
+    ----
+        date_str: ISO 8601 datetime string or None
+
+    Returns:
+    -------
+        Parsed datetime or None
+
+    """
+    if not date_str:
+        return None
+
+    try:
+        # Asana returns ISO 8601 format: "2024-11-15T10:30:00.000Z"
+        return datetime.fromisoformat(date_str.replace("Z", "+00:00"))
+    except (ValueError, AttributeError) as e:
+        logger.warning(f"Failed to parse Asana datetime '{date_str}': {e}")
+        return None
+
+
+def map_asana_project_to_epic(project: dict[str, Any]) -> Epic:
+    """Map Asana project to Epic.
+
+    Args:
+    ----
+        project: Asana project data
+
+    Returns:
+    -------
+        Epic model instance
+
+    """
+    # Extract custom field for priority if exists
+    priority = Priority.MEDIUM
+    custom_fields = project.get("custom_fields", [])
+    for field in custom_fields:
+        if field.get("name", "").lower() == "priority" and field.get("enum_value"):
+            priority = map_priority_from_asana(field["enum_value"].get("name"))
+            break
+
+    # Map project state (archived, current, on_hold) to TicketState
+    archived = project.get("archived", False)
+    state = TicketState.CLOSED if archived else TicketState.OPEN
+
+    return Epic(
+        id=project.get("gid"),
+        title=project.get("name", ""),
+        description=project.get("notes", ""),
+        state=state,
+        priority=priority,
+        created_at=parse_asana_datetime(project.get("created_at")),
+        updated_at=parse_asana_datetime(project.get("modified_at")),
+        metadata={
+            "asana_gid": project.get("gid"),
+            "asana_permalink_url": project.get("permalink_url"),
+            "asana_workspace_gid": project.get("workspace", {}).get("gid"),
+            "asana_team_gid": (
+                project.get("team", {}).get("gid") if project.get("team") else None
+            ),
+            "asana_color": project.get("color"),
+            "asana_archived": archived,
+            "asana_public": project.get("public", False),
+        },
+    )
+
+
+def map_asana_task_to_task(task: dict[str, Any]) -> Task:
+    """Map Asana task to Task.
+
+    Detects task type based on hierarchy:
+    - Has parent task → TASK (subtask)
+    - No parent task → ISSUE (standard task)
+
+    Args:
+    ----
+        task: Asana task data
+
+    Returns:
+    -------
+        Task model instance
+
+    """
+    # Determine ticket type based on parent
+    parent_task = task.get("parent")
+    ticket_type = TicketType.TASK if parent_task else TicketType.ISSUE
+
+    # Extract state from completed field AND Status custom field (Bug Fix #3)
+    completed = task.get("completed", False)
+    state = TicketState.OPEN
+    custom_state = None
+
+    # Check Status custom field first (if present)
+    custom_fields = task.get("custom_fields", [])
+    for field in custom_fields:
+        if field.get("name", "").lower() == "status":
+            enum_value = field.get("enum_value")
+            if enum_value:
+                custom_state = enum_value.get("name", "")
+            break
+
+    # Use enhanced state mapping that considers both Status field and completed boolean
+    state = map_state_from_asana(completed, custom_state)
+
+    # Extract priority from custom fields
+    priority = Priority.MEDIUM
+    for field in custom_fields:
+        if field.get("name", "").lower() == "priority" and field.get("enum_value"):
+            priority = map_priority_from_asana(field["enum_value"].get("name"))
+            break
+
+    # Extract tags
+    tags = [tag.get("name", "") for tag in task.get("tags", []) if tag.get("name")]
+
+    # Extract assignee
+    assignee = None
+    if task.get("assignee"):
+        assignee = task["assignee"].get("gid")
+
+    # Extract project (parent_epic for issues)
+    parent_epic = None
+    projects = task.get("projects", [])
+    if projects and ticket_type == TicketType.ISSUE:
+        # Use first project as parent epic
+        parent_epic = projects[0].get("gid")
+
+    # Extract parent task (parent_issue for subtasks)
+    parent_issue = None
+    if parent_task:
+        parent_issue = parent_task.get("gid")
+
+    return Task(
+        id=task.get("gid"),
+        title=task.get("name", ""),
+        description=task.get("notes", ""),
+        state=state,
+        priority=priority,
+        tags=tags,
+        assignee=assignee,
+        ticket_type=ticket_type,
+        parent_epic=parent_epic,
+        parent_issue=parent_issue,
+        created_at=parse_asana_datetime(task.get("created_at")),
+        updated_at=parse_asana_datetime(task.get("modified_at")),
+        metadata={
+            "asana_gid": task.get("gid"),
+            "asana_permalink_url": task.get("permalink_url"),
+            "asana_workspace_gid": task.get("workspace", {}).get("gid"),
+            "asana_completed": completed,
+            "asana_completed_at": task.get("completed_at"),
+            "asana_due_on": task.get("due_on"),
+            "asana_due_at": task.get("due_at"),
+            "asana_num_subtasks": task.get("num_subtasks", 0),
+            "asana_num_hearts": task.get("num_hearts", 0),
+            "asana_num_likes": task.get("num_likes", 0),
+        },
+    )
+
+
+def map_epic_to_asana_project(
+    epic: Epic,
+    workspace_gid: str,
+    team_gid: str | None = None,
+) -> dict[str, Any]:
+    """Map Epic to Asana project create/update data.
+
+    Args:
+    ----
+        epic: Epic model instance
+        workspace_gid: Asana workspace GID
+        team_gid: Asana team GID (optional, required for organization workspaces)
+
+    Returns:
+    -------
+        Asana project data for create/update
+
+    """
+    project_data: dict[str, Any] = {
+        "name": epic.title,
+        "workspace": workspace_gid,
+    }
+
+    # Add team if provided (required for organization workspaces)
+    if team_gid:
+        project_data["team"] = team_gid
+
+    if epic.description:
+        project_data["notes"] = epic.description
+
+    # Map state to archived
+    if epic.state in (TicketState.CLOSED, TicketState.DONE):
+        project_data["archived"] = True
+
+    return project_data
+
+
+def map_task_to_asana_task(
+    task: Task,
+    workspace_gid: str,
+    project_gids: list[str] | None = None,
+) -> dict[str, Any]:
+    """Map Task to Asana task create/update data.
+
+    Args:
+    ----
+        task: Task model instance
+        workspace_gid: Asana workspace GID
+        project_gids: List of project GIDs to add task to (optional)
+
+    Returns:
+    -------
+        Asana task data for create/update
+
+    """
+    task_data: dict[str, Any] = {
+        "name": task.title,
+        "workspace": workspace_gid,
+    }
+
+    if task.description:
+        task_data["notes"] = task.description
+
+    # Map state to completed
+    task_data["completed"] = map_state_to_asana(task.state)
+
+    # Add to projects if provided
+    if project_gids:
+        task_data["projects"] = project_gids
+
+    # Add parent if subtask
+    if task.parent_issue:
+        task_data["parent"] = task.parent_issue
+
+    # Add assignee if provided
+    if task.assignee:
+        task_data["assignee"] = task.assignee
+
+    # Due date mapping
+    if task.metadata.get("asana_due_on"):
+        task_data["due_on"] = task.metadata["asana_due_on"]
+    elif task.metadata.get("asana_due_at"):
+        task_data["due_at"] = task.metadata["asana_due_at"]
+
+    return task_data
+
+
+def map_asana_story_to_comment(story: dict[str, Any], task_gid: str) -> Comment | None:
+    """Map Asana story to Comment.
+
+    Only maps stories of type 'comment'. Other story types (system events) are filtered out.
+
+    Args:
+    ----
+        story: Asana story data
+        task_gid: Parent task GID
+
+    Returns:
+    -------
+        Comment model instance or None if not a comment type
+
+    """
+    # Filter: only return actual comments, not system stories
+    story_type = story.get("type", "")
+    if story_type != "comment":
+        return None
+
+    # Extract author
+    created_by = story.get("created_by", {})
+    author = created_by.get("gid") or created_by.get("name", "Unknown")
+
+    return Comment(
+        id=story.get("gid"),
+        ticket_id=task_gid,
+        author=author,
+        content=story.get("text", ""),
+        created_at=parse_asana_datetime(story.get("created_at")),
+        metadata={
+            "asana_gid": story.get("gid"),
+            "asana_type": story_type,
+            "asana_created_by_name": created_by.get("name"),
+        },
+    )
+
+
+def map_asana_attachment_to_attachment(
+    attachment: dict[str, Any], task_gid: str
+) -> Attachment:
+    """Map Asana attachment to Attachment.
+
+    IMPORTANT: Use permanent_url for reliable access, not download_url which expires.
+
+    Args:
+    ----
+        attachment: Asana attachment data
+        task_gid: Parent task GID
+
+    Returns:
+    -------
+        Attachment model instance
+
+    """
+    # Extract creator info
+    created_by_data = attachment.get("created_by", {})
+    created_by = created_by_data.get("gid") or created_by_data.get("name", "Unknown")
+
+    # Use permanent_url (not download_url which expires)
+    url = attachment.get("permanent_url") or attachment.get("view_url")
+
+    return Attachment(
+        id=attachment.get("gid"),
+        ticket_id=task_gid,
+        filename=attachment.get("name", ""),
+        url=url,
+        content_type=attachment.get("resource_subtype"),  # e.g., "external", "asana"
+        size_bytes=attachment.get("size"),
+        created_at=parse_asana_datetime(attachment.get("created_at")),
+        created_by=created_by,
+        metadata={
+            "asana_gid": attachment.get("gid"),
+            "asana_host": attachment.get("host"),  # e.g., "asana", "dropbox", "google"
+            "asana_resource_subtype": attachment.get("resource_subtype"),
+            "asana_view_url": attachment.get("view_url"),
+            "asana_download_url": attachment.get("download_url"),  # Expires!
+            "asana_permanent_url": attachment.get("permanent_url"),  # Stable URL
+        },
+    )
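These mappers translate between Asana's raw task/project/story/attachment payloads and the core Epic, Task, Comment, and Attachment models. A small round-trip sketch follows; the GIDs and the "Priority"/"High" custom-field values are invented for illustration, and the exact enum names accepted by map_priority_from_asana live in types.py, which is not shown in this hunk.

from mcp_ticketer.adapters.asana.mappers import (
    map_asana_task_to_task,
    map_task_to_asana_task,
)

# Trimmed example of an Asana task payload (all GIDs are placeholders).
asana_task = {
    "gid": "1205551234567890",
    "name": "Fix login redirect",
    "notes": "Users bounce back to /login after OAuth.",
    "completed": False,
    "parent": None,
    "projects": [{"gid": "1200000000000001"}],
    "tags": [{"name": "bug"}],
    "assignee": {"gid": "1111111111111111"},
    "created_at": "2024-11-15T10:30:00.000Z",
    "modified_at": "2024-11-16T08:00:00.000Z",
    "custom_fields": [{"name": "Priority", "enum_value": {"name": "High"}}],
}

# No parent task → TicketType.ISSUE; the first project GID becomes parent_epic.
task = map_asana_task_to_task(asana_task)

# Convert back to an Asana create/update payload for a given workspace.
payload = map_task_to_asana_task(task, workspace_gid="1100000000000000")
# payload carries name, workspace, notes, completed, and assignee, suitable for
# something like AsanaClient.post("tasks", payload) from the client shown above.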