mcp-ticketer 0.4.11__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +394 -9
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1416 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github.py +836 -105
- mcp_ticketer/adapters/hybrid.py +47 -5
- mcp_ticketer/adapters/jira.py +772 -1
- mcp_ticketer/adapters/linear/adapter.py +2293 -108
- mcp_ticketer/adapters/linear/client.py +146 -12
- mcp_ticketer/adapters/linear/mappers.py +105 -11
- mcp_ticketer/adapters/linear/queries.py +168 -1
- mcp_ticketer/adapters/linear/types.py +80 -4
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +3 -3
- mcp_ticketer/cli/adapter_diagnostics.py +4 -2
- mcp_ticketer/cli/auggie_configure.py +18 -6
- mcp_ticketer/cli/codex_configure.py +175 -60
- mcp_ticketer/cli/configure.py +884 -146
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +31 -28
- mcp_ticketer/cli/discover.py +293 -21
- mcp_ticketer/cli/gemini_configure.py +18 -6
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +99 -15
- mcp_ticketer/cli/main.py +109 -2055
- mcp_ticketer/cli/mcp_configure.py +673 -99
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +6 -6
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +536 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +639 -0
- mcp_ticketer/cli/simple_health.py +13 -11
- mcp_ticketer/cli/ticket_commands.py +277 -36
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +45 -41
- mcp_ticketer/core/__init__.py +35 -1
- mcp_ticketer/core/adapter.py +170 -5
- mcp_ticketer/core/config.py +38 -31
- mcp_ticketer/core/env_discovery.py +33 -3
- mcp_ticketer/core/env_loader.py +7 -6
- mcp_ticketer/core/exceptions.py +10 -4
- mcp_ticketer/core/http_client.py +10 -10
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +32 -20
- mcp_ticketer/core/models.py +136 -1
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +148 -14
- mcp_ticketer/core/registry.py +1 -1
- mcp_ticketer/core/session_state.py +171 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +2 -2
- mcp_ticketer/mcp/server/__init__.py +2 -2
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/main.py +187 -93
- mcp_ticketer/mcp/server/routing.py +655 -0
- mcp_ticketer/mcp/server/server_sdk.py +58 -0
- mcp_ticketer/mcp/server/tools/__init__.py +37 -9
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +65 -20
- mcp_ticketer/mcp/server/tools/bulk_tools.py +259 -202
- mcp_ticketer/mcp/server/tools/comment_tools.py +74 -12
- mcp_ticketer/mcp/server/tools/config_tools.py +1429 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +878 -319
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +3 -7
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +180 -97
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1182 -82
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/health_monitor.py +1 -0
- mcp_ticketer/queue/manager.py +4 -4
- mcp_ticketer/queue/queue.py +3 -3
- mcp_ticketer/queue/run_worker.py +1 -1
- mcp_ticketer/queue/ticket_registry.py +2 -2
- mcp_ticketer/queue/worker.py +15 -13
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.0.1.dist-info/METADATA +1366 -0
- mcp_ticketer-2.0.1.dist-info/RECORD +122 -0
- mcp_ticketer-0.4.11.dist-info/METADATA +0 -496
- mcp_ticketer-0.4.11.dist-info/RECORD +0 -77
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.4.11.dist-info → mcp_ticketer-2.0.1.dist-info}/top_level.txt +0 -0

@@ -4,25 +4,41 @@ from __future__ import annotations
 
 import asyncio
 import logging
+import mimetypes
 import os
+from datetime import datetime
+from pathlib import Path
 from typing import Any
 
 try:
+    import httpx
     from gql import gql
     from gql.transport.exceptions import TransportQueryError
 except ImportError:
     gql = None
     TransportQueryError = Exception
+    httpx = None
 
 import builtins
 
 from ...core.adapter import BaseAdapter
-from ...core.models import
+from ...core.models import (
+    Attachment,
+    Comment,
+    Epic,
+    ProjectUpdate,
+    ProjectUpdateHealth,
+    SearchQuery,
+    Task,
+    TicketState,
+)
 from ...core.registry import AdapterRegistry
+from ...core.url_parser import URLParserError, normalize_project_id
 from .client import LinearGraphQLClient
 from .mappers import (
     build_linear_issue_input,
     build_linear_issue_update_input,
+    map_linear_attachment_to_attachment,
     map_linear_comment_to_comment,
     map_linear_issue_to_task,
     map_linear_project_to_epic,
@@ -30,7 +46,16 @@ from .mappers import (
 from .queries import (
     ALL_FRAGMENTS,
     CREATE_ISSUE_MUTATION,
+    CREATE_LABEL_MUTATION,
+    CREATE_PROJECT_UPDATE_MUTATION,
+    GET_CUSTOM_VIEW_QUERY,
+    GET_ISSUE_STATUS_QUERY,
+    GET_PROJECT_UPDATE_QUERY,
+    LIST_CYCLES_QUERY,
+    LIST_ISSUE_STATUSES_QUERY,
     LIST_ISSUES_QUERY,
+    LIST_PROJECT_UPDATES_QUERY,
+    LIST_PROJECTS_QUERY,
     SEARCH_ISSUES_QUERY,
     UPDATE_ISSUE_MUTATION,
     WORKFLOW_STATES_QUERY,
@@ -66,6 +91,7 @@ class LinearAdapter(BaseAdapter[Task]):
         """Initialize Linear adapter.
 
         Args:
+        ----
             config: Configuration with:
                 - api_key: Linear API key (or LINEAR_API_KEY env var)
                 - workspace: Linear workspace name (optional, for documentation)
@@ -74,6 +100,7 @@ class LinearAdapter(BaseAdapter[Task]):
                 - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)
 
         Raises:
+        ------
             ValueError: If required configuration is missing
 
         """
@@ -120,6 +147,7 @@ class LinearAdapter(BaseAdapter[Task]):
         self.workspace = config.get("workspace", "")
         self.team_key = config.get("team_key")
         self.team_id = config.get("team_id")
+        self.user_email = config.get("user_email")  # Optional default assignee
         self.api_url = config.get("api_url", "https://api.linear.app/graphql")
 
         # Validate team configuration
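Note: the new user_email field is read straight from the adapter's config dict. A minimal config sketch using only keys visible in this diff (the values are placeholders, not real credentials):

    import os

    # Keys as read in __init__ above; api_key plus one of team_key/team_id are required.
    config = {
        "api_key": os.environ.get("LINEAR_API_KEY"),   # or pass the key directly
        "team_key": "ENG",                             # short code; resolved to a team UUID at init
        "user_email": "dev@example.com",               # optional default assignee (new in 2.x)
        "api_url": "https://api.linear.app/graphql",   # default shown in the diff
    }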
@@ -133,6 +161,7 @@ class LinearAdapter(BaseAdapter[Task]):
         """Validate Linear API credentials.
 
         Returns:
+        -------
             Tuple of (is_valid, error_message)
 
         """
@@ -145,16 +174,49 @@ class LinearAdapter(BaseAdapter[Task]):
         return True, ""
 
     async def initialize(self) -> None:
-        """Initialize adapter by preloading team, states, and labels data concurrently.
+        """Initialize adapter by preloading team, states, and labels data concurrently.
+
+        Design Decision: Enhanced Error Handling (1M-431)
+        --------------------------------------------------
+        Improved error messages to provide actionable troubleshooting guidance.
+        Added logging to track initialization progress and identify failure points.
+        Preserves original ValueError type for backward compatibility.
+
+        Raises:
+        ------
+            ValueError: If connection fails or initialization encounters errors
+                with detailed troubleshooting information
+
+        """
         if self._initialized:
             return
 
+        import logging
+
+        logger = logging.getLogger(__name__)
+
         try:
             # Test connection first
-
-
+            logger.info(
+                f"Testing Linear API connection for team {self.team_key or self.team_id}..."
+            )
+            connection_ok = await self.client.test_connection()
+
+            if not connection_ok:
+                raise ValueError(
+                    "Failed to connect to Linear API. Troubleshooting:\n"
+                    "1. Verify API key is valid (starts with 'lin_api_')\n"
+                    "2. Check team_key matches your Linear workspace\n"
+                    "3. Ensure API key has proper permissions\n"
+                    "4. Review logs for detailed error information\n"
+                    f"  API key preview: {self.api_key[:20] if self.api_key else 'None'}...\n"
+                    f"  Team: {self.team_key or self.team_id}"
+                )
+
+            logger.info("Linear API connection successful")
 
             # Load team data and workflow states concurrently
+            logger.debug("Loading team data and workflow states...")
             team_id = await self._ensure_team_id()
 
             # Load workflow states and labels for the team
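Since initialize() now raises ValueError carrying a troubleshooting checklist instead of failing opaquely, callers are expected to catch it. A usage sketch, assuming the import path implied by the file list above and placeholder credentials:

    import asyncio

    from mcp_ticketer.adapters.linear.adapter import LinearAdapter

    async def main() -> None:
        adapter = LinearAdapter({"api_key": "lin_api_...", "team_key": "ENG"})
        try:
            # Tests the connection, then preloads team, state, and label data.
            await adapter.initialize()
        except ValueError as err:
            # 2.x surfaces the numbered troubleshooting steps in the message.
            print(f"Linear setup failed:\n{err}")

    asyncio.run(main())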
@@ -162,69 +224,330 @@ class LinearAdapter(BaseAdapter[Task]):
             await self._load_team_labels(team_id)
 
             self._initialized = True
+            logger.info("Linear adapter initialized successfully")
 
+        except ValueError:
+            # Re-raise ValueError with original message (for connection failures)
+            raise
         except Exception as e:
-
+            logger.error(
+                f"Linear adapter initialization failed: {type(e).__name__}: {e}",
+                exc_info=True,
+            )
+            raise ValueError(
+                f"Failed to initialize Linear adapter: {type(e).__name__}: {e}\n"
+                "Check your credentials and network connection."
+            ) from e
 
     async def _ensure_team_id(self) -> str:
         """Ensure we have a team ID, resolving from team_key if needed.
 
+        Validates that team_id is a UUID. If it looks like a team_key,
+        resolves it to the actual UUID.
+
         Returns:
-
+        -------
+            Valid Linear team UUID
 
         Raises:
-
+        ------
+            ValueError: If neither team_id nor team_key provided, or resolution fails
 
         """
+        logger = logging.getLogger(__name__)
+
+        # If we have a team_id, validate it's actually a UUID
         if self.team_id:
-
+            # Check if it looks like a UUID (36 chars with hyphens)
+            import re
+
+            uuid_pattern = re.compile(
+                r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
+                re.IGNORECASE,
+            )
+
+            if uuid_pattern.match(self.team_id):
+                # Already a valid UUID
+                return str(self.team_id)
+            # Looks like a team_key string - need to resolve it
+            logger.warning(
+                f"team_id '{self.team_id}' is not a UUID - treating as team_key and resolving"
+            )
+            teams = await self._get_team_by_key(self.team_id)
+            if teams and len(teams) > 0:
+                resolved_id = teams[0]["id"]
+                logger.info(
+                    f"Resolved team_key '{self.team_id}' to UUID: {resolved_id}"
+                )
+                # Cache the resolved UUID
+                self.team_id = resolved_id
+                return resolved_id
+            raise ValueError(
+                f"Cannot resolve team_id '{self.team_id}' to a valid Linear team UUID. "
+                f"Please use team_key instead for team short codes like 'ENG'."
+            )
 
+        # No team_id, must have team_key
         if not self.team_key:
-            raise ValueError(
+            raise ValueError(
+                "Either team_id (UUID) or team_key (short code) must be provided"
+            )
 
         # Query team by key
+        teams = await self._get_team_by_key(self.team_key)
+
+        if not teams or len(teams) == 0:
+            raise ValueError(f"Team with key '{self.team_key}' not found")
+
+        team = teams[0]
+        team_id = team["id"]
+
+        # Cache the resolved team_id
+        self.team_id = team_id
+        self._team_data = team
+        logger.info(f"Resolved team_key '{self.team_key}' to team_id: {team_id}")
+
+        return team_id
+
+    async def _get_team_by_key(self, team_key: str) -> list[dict[str, Any]]:
+        """Query Linear API to get team by key.
+
+        Args:
+        ----
+            team_key: Short team identifier (e.g., 'ENG', 'BTA')
+
+        Returns:
+        -------
+            List of matching teams
+
+        """
         query = """
             query GetTeamByKey($key: String!) {
                 teams(filter: { key: { eq: $key } }) {
                     nodes {
                         id
-                        name
                         key
-
+                        name
                     }
                 }
             }
         """
 
+        result = await self.client.execute_query(query, {"key": team_key})
+
+        if "teams" in result and "nodes" in result["teams"]:
+            return result["teams"]["nodes"]
+
+        return []
+
+    async def _get_custom_view(self, view_id: str) -> dict[str, Any] | None:
+        """Get a Linear custom view by ID to check if it exists.
+
+        Args:
+        ----
+            view_id: View identifier (slug-uuid format)
+
+        Returns:
+        -------
+            View dict with fields (id, name, description, issues) or None if not found
+
+        """
+        logging.debug(f"[VIEW DEBUG] _get_custom_view called with view_id: {view_id}")
+
+        if not view_id:
+            logging.debug("[VIEW DEBUG] view_id is empty, returning None")
+            return None
+
         try:
-
-
+            logging.debug(
+                f"[VIEW DEBUG] Executing GET_CUSTOM_VIEW_QUERY for view_id: {view_id}"
+            )
+            result = await self.client.execute_query(
+                GET_CUSTOM_VIEW_QUERY, {"viewId": view_id, "first": 10}
+            )
+            logging.debug(f"[VIEW DEBUG] Query result: {result}")
+
+            if result.get("customView"):
+                logging.debug(
+                    f"[VIEW DEBUG] customView found in result: {result.get('customView')}"
+                )
+                return result["customView"]
 
-
-
+            logging.debug(
+                f"[VIEW DEBUG] No customView in result. Checking pattern: has_hyphen={'-' in view_id}, length={len(view_id)}"
+            )
 
-
-
-
+            # API query failed but check if this looks like a view identifier
+            # View IDs from URLs have format: slug-uuid (e.g., "mcp-skills-issues-0d0359fabcf9")
+            # If it has hyphens and is longer than 12 chars, it's likely a view URL identifier
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                # We can't fetch the actual name, so use generic "Linear View"
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
 
-
+            logging.debug("[VIEW DEBUG] Pattern did not match, returning None")
+            return None
 
         except Exception as e:
-
+            logging.debug(
+                f"[VIEW DEBUG] Exception caught: {type(e).__name__}: {str(e)}"
+            )
+            # Linear returns error if view not found
+            # Check if this looks like a view identifier to provide helpful error
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Exception handler: Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
+            logging.debug(
+                "[VIEW DEBUG] Exception handler: Pattern did not match, returning None"
+            )
+            return None
+
+    async def get_project(self, project_id: str) -> dict[str, Any] | None:
+        """Get a Linear project by ID using direct query.
+
+        This method uses Linear's direct project(id:) GraphQL query for efficient lookups.
+        Supports UUID, slugId, or short ID formats.
+
+        Args:
+        ----
+            project_id: Project UUID, slugId, or short ID
+
+        Returns:
+        -------
+            Project dict with fields (id, name, description, state, etc.) or None if not found
+
+        Examples:
+        --------
+            - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (UUID)
+            - "crm-smart-monitoring-system-f59a41a96c52" (slugId)
+            - "6cf55cfcfad4" (short ID - 12 hex chars)
+
+        """
+        if not project_id:
+            return None
+
+        # Direct query using Linear's project(id:) endpoint
+        query = """
+            query GetProject($id: String!) {
+                project(id: $id) {
+                    id
+                    name
+                    description
+                    state
+                    slugId
+                    createdAt
+                    updatedAt
+                    url
+                    icon
+                    color
+                    targetDate
+                    startedAt
+                    completedAt
+                    teams {
+                        nodes {
+                            id
+                            name
+                            key
+                            description
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(query, {"id": project_id})
+
+            if result.get("project"):
+                return result["project"]
+
+            # No match found
+            return None
+
+        except Exception:
+            # Linear returns error if project not found - return None instead of raising
+            return None
+
+    async def get_epic(self, epic_id: str, include_issues: bool = True) -> Epic | None:
+        """Get Linear project as Epic with optional issue loading.
+
+        This is the preferred method for reading projects/epics as it provides
+        explicit control over whether to load child issues.
+
+        Args:
+        ----
+            epic_id: Project UUID, slugId, or short ID
+            include_issues: Whether to fetch and populate child_issues (default True)
+
+        Returns:
+        -------
+            Epic object with child_issues populated if include_issues=True,
+            or None if project not found
+
+        Raises:
+        ------
+            ValueError: If credentials invalid
+
+        Example:
+        -------
+            # Get project with issues
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895")
+
+            # Get project metadata only (faster)
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895", include_issues=False)
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Fetch project data
+        project_data = await self.get_project(epic_id)
+        if not project_data:
+            return None
+
+        # Map to Epic
+        epic = map_linear_project_to_epic(project_data)
+
+        # Optionally fetch and populate child issues
+        if include_issues:
+            issues = await self._get_project_issues(epic_id)
+            epic.child_issues = [issue.id for issue in issues if issue.id is not None]
+
+        return epic
 
     async def _resolve_project_id(self, project_identifier: str) -> str | None:
         """Resolve project identifier (slug, name, short ID, or URL) to full UUID.
 
         Args:
+        ----
             project_identifier: Project slug, name, short ID, or URL
 
         Returns:
+        -------
             Full Linear project UUID, or None if not found
 
         Raises:
+        ------
             ValueError: If project lookup fails
 
         Examples:
+        --------
             - "crm-smart-monitoring-system" (slug)
             - "CRM Smart Monitoring System" (name)
             - "f59a41a96c52" (short ID from URL)
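The _ensure_team_id change above turns on a single question: does the configured team_id look like a UUID, or like a short key such as "ENG"? The same check, pulled out into a standalone sketch (the helper name is ours, the regex is the one from the diff):

    import re

    # Same pattern as in _ensure_team_id above: 8-4-4-4-12 hex groups, case-insensitive.
    UUID_PATTERN = re.compile(
        r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
        re.IGNORECASE,
    )

    def looks_like_team_uuid(value: str) -> bool:
        """True for a Linear team UUID, False for a short key like 'ENG'."""
        return bool(UUID_PATTERN.match(value))

    assert looks_like_team_uuid("9cfb482a-81f3-4701-bcb1-02c3e936cc6f")
    assert not looks_like_team_uuid("ENG")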
@@ -234,55 +557,120 @@ class LinearAdapter(BaseAdapter[Task]):
         if not project_identifier:
             return None
 
-        #
-
-
-
-
-
-
-
-
-
-
+        # Use tested URL parser to normalize the identifier
+        # This correctly extracts project IDs from URLs and handles:
+        # - Full URLs: https://linear.app/team/project/slug-id/overview
+        # - Slug-ID format: slug-id
+        # - Plain identifiers: id
+        try:
+            project_identifier = normalize_project_id(
+                project_identifier, adapter_type="linear"
+            )
+        except URLParserError as e:
+            logging.getLogger(__name__).warning(
+                f"Failed to parse project identifier: {e}"
+            )
+            # Continue with original identifier - may still work if it's a name
 
         # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
         # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
         if len(project_identifier) == 36 and project_identifier.count("-") == 4:
             return project_identifier
 
-        #
+        # OPTIMIZATION: Try direct query first if it looks like a UUID, slugId, or short ID
+        # This is more efficient than listing all projects
+        should_try_direct_query = False
+
+        # Check if it looks like a short ID (exactly 12 hex characters)
+        if len(project_identifier) == 12 and all(
+            c in "0123456789abcdefABCDEF" for c in project_identifier
+        ):
+            should_try_direct_query = True
+
+        # Check if it looks like a slugId format (contains dashes and ends with 12 hex chars)
+        if "-" in project_identifier:
+            parts = project_identifier.rsplit("-", 1)
+            if len(parts) > 1:
+                potential_short_id = parts[1]
+                if len(potential_short_id) == 12 and all(
+                    c in "0123456789abcdefABCDEF" for c in potential_short_id
+                ):
+                    should_try_direct_query = True
+
+        # Try direct query first if identifier format suggests it might work
+        if should_try_direct_query:
+            try:
+                project = await self.get_project(project_identifier)
+                if project:
+                    return project["id"]
+            except Exception as e:
+                # Direct query failed - fall through to list-based search
+                logging.getLogger(__name__).debug(
+                    f"Direct project query failed for '{project_identifier}': {e}. "
+                    f"Falling back to listing all projects."
+                )
+
+        # FALLBACK: Query all projects with pagination support
+        # This is less efficient but handles name-based lookups and edge cases
         query = """
-            query GetProjects {
-                projects(first:
+            query GetProjects($first: Int!, $after: String) {
+                projects(first: $first, after: $after) {
                     nodes {
                         id
                         name
                         slugId
                     }
+                    pageInfo {
+                        hasNextPage
+                        endCursor
+                    }
                 }
             }
         """
 
         try:
-
-
+            # Fetch all projects across multiple pages
+            all_projects = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page:
+                variables = {"first": 100}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(query, variables)
+                projects_data = result.get("projects", {})
+                page_projects = projects_data.get("nodes", [])
+                page_info = projects_data.get("pageInfo", {})
+
+                all_projects.extend(page_projects)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
 
             # Search for match by slug, slugId, name (case-insensitive)
             project_lower = project_identifier.lower()
-            for project in
+            for project in all_projects:
                 # Check if identifier matches slug pattern (extracted from slugId)
                 slug_id = project.get("slugId", "")
                 if slug_id:
                     # slugId format: "crm-smart-monitoring-system-f59a41a96c52"
+                    # Linear short IDs are always exactly 12 hexadecimal characters
                     # Extract both the slug part and short ID
                     if "-" in slug_id:
-                        parts = slug_id.rsplit(
-
-
-
-
+                        parts = slug_id.rsplit("-", 1)
+                        potential_short_id = parts[1] if len(parts) > 1 else ""
+
+                        # Validate it's exactly 12 hex characters
+                        if len(potential_short_id) == 12 and all(
+                            c in "0123456789abcdefABCDEF" for c in potential_short_id
+                        ):
+                            slug_part = parts[0]
+                            short_id = potential_short_id
+                        else:
+                            # Fallback: treat entire slugId as slug if last part isn't valid
+                            slug_part = slug_id
+                            short_id = ""
 
                     # Match full slugId, slug part, or short ID
                     if (
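The fallback path above replaces the old single GetProjects query with cursor pagination over pageInfo { hasNextPage endCursor }. The loop generalizes to any Relay-style connection; a sketch, assuming an execute_query coroutine shaped like the client's in this diff:

    from typing import Any, Awaitable, Callable

    async def fetch_all_pages(
        execute_query: Callable[[str, dict[str, Any]], Awaitable[dict[str, Any]]],
        query: str,
        page_size: int = 100,
    ) -> list[dict[str, Any]]:
        """Cursor-paginate a Relay-style connection named 'projects', as in the diff."""
        nodes: list[dict[str, Any]] = []
        after_cursor: str | None = None
        has_next_page = True
        while has_next_page:
            variables: dict[str, Any] = {"first": page_size}
            if after_cursor:
                variables["after"] = after_cursor
            result = await execute_query(query, variables)
            connection = result.get("projects", {})
            nodes.extend(connection.get("nodes", []))
            page_info = connection.get("pageInfo", {})
            has_next_page = page_info.get("hasNextPage", False)
            after_cursor = page_info.get("endCursor")
        return nodes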
@@ -300,12 +688,204 @@ class LinearAdapter(BaseAdapter[Task]):
             return None
 
         except Exception as e:
-            raise ValueError(
+            raise ValueError(
+                f"Failed to resolve project '{project_identifier}': {e}"
+            ) from e
+
+    async def _validate_project_team_association(
+        self, project_id: str, team_id: str
+    ) -> tuple[bool, list[str]]:
+        """Check if team is associated with project.
+
+        Args:
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID
+
+        Returns:
+        -------
+            Tuple of (is_associated, list_of_project_team_ids)
+
+        """
+        project = await self.get_project(project_id)
+        if not project:
+            return False, []
+
+        # Extract team IDs from project's teams
+        project_team_ids = [
+            team["id"] for team in project.get("teams", {}).get("nodes", [])
+        ]
+
+        return team_id in project_team_ids, project_team_ids
+
+    async def _ensure_team_in_project(self, project_id: str, team_id: str) -> bool:
+        """Add team to project if not already associated.
+
+        Args:
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID to add
+
+        Returns:
+        -------
+            True if successful, False otherwise
+
+        """
+        # First check current association
+        is_associated, existing_team_ids = (
+            await self._validate_project_team_association(project_id, team_id)
+        )
+
+        if is_associated:
+            return True  # Already associated, nothing to do
+
+        # Add team to project by updating project's teamIds
+        update_query = """
+            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+                projectUpdate(id: $id, input: $input) {
+                    success
+                    project {
+                        id
+                        teams {
+                            nodes {
+                                id
+                                name
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        # Include existing teams + new team
+        all_team_ids = existing_team_ids + [team_id]
+
+        try:
+            result = await self.client.execute_mutation(
+                update_query, {"id": project_id, "input": {"teamIds": all_team_ids}}
+            )
+            success = result.get("projectUpdate", {}).get("success", False)
+
+            if success:
+                logging.getLogger(__name__).info(
+                    f"Successfully added team {team_id} to project {project_id}"
+                )
+            else:
+                logging.getLogger(__name__).warning(
+                    f"Failed to add team {team_id} to project {project_id}"
+                )
+
+            return success
+        except Exception as e:
+            logging.getLogger(__name__).error(
+                f"Error adding team {team_id} to project {project_id}: {e}"
+            )
+            return False
+
+    async def _get_project_issues(
+        self, project_id: str, limit: int = 100
+    ) -> list[Task]:
+        """Fetch all issues belonging to a Linear project.
+
+        Uses existing build_issue_filter() and LIST_ISSUES_QUERY infrastructure
+        to fetch issues filtered by project_id.
+
+        Args:
+        ----
+            project_id: Project UUID, slugId, or short ID
+            limit: Maximum issues to return (default 100, max 250)
+
+        Returns:
+        -------
+            List of Task objects representing project's issues
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or query fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Build filter for issues belonging to this project
+        issue_filter = build_issue_filter(project_id=project_id)
+
+        variables = {
+            "filter": issue_filter,
+            "first": min(limit, 250),  # Linear API max per page
+        }
+
+        try:
+            result = await self.client.execute_query(LIST_ISSUES_QUERY, variables)
+            issues = result.get("issues", {}).get("nodes", [])
+
+            # Map Linear issues to Task objects
+            return [map_linear_issue_to_task(issue) for issue in issues]
+
+        except Exception as e:
+            # Log but don't fail - return empty list if issues can't be fetched
+            logger.warning(f"Failed to fetch project issues for {project_id}: {e}")
+            return []
+
+    async def _resolve_issue_id(self, issue_identifier: str) -> str | None:
+        """Resolve issue identifier (like "ENG-842") to full UUID.
+
+        Args:
+        ----
+            issue_identifier: Issue identifier (e.g., "ENG-842") or UUID
+
+        Returns:
+        -------
+            Full Linear issue UUID, or None if not found
+
+        Raises:
+        ------
+            ValueError: If issue lookup fails
+
+        Examples:
+        --------
+            - "ENG-842" (issue identifier)
+            - "BTA-123" (issue identifier)
+            - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (already a UUID)
+
+        """
+        if not issue_identifier:
+            return None
+
+        # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
+        # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        if len(issue_identifier) == 36 and issue_identifier.count("-") == 4:
+            return issue_identifier
+
+        # Query issue by identifier to get its UUID
+        query = """
+            query GetIssueId($identifier: String!) {
+                issue(id: $identifier) {
+                    id
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(
+                query, {"identifier": issue_identifier}
+            )
+
+            if result.get("issue"):
+                return result["issue"]["id"]
+
+            # No match found
+            return None
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to resolve issue '{issue_identifier}': {e}"
+            ) from e
 
     async def _load_workflow_states(self, team_id: str) -> None:
         """Load and cache workflow states for the team.
 
         Args:
+        ----
             team_id: Linear team ID
 
         """
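_ensure_team_in_project above is a read-then-merge-then-mutate sequence: Linear's teamIds input replaces the whole list, so existing team IDs must be merged in rather than overwritten. A compressed sketch against a hypothetical client object exposing the same execute_mutation shape as the diff's GraphQL client:

    from typing import Any

    async def ensure_team(
        client: Any, project: dict[str, Any], project_id: str, team_id: str
    ) -> bool:
        """Compressed version of the check-then-update flow above.

        `project` is assumed to be the dict returned by get_project().
        """
        existing = [t["id"] for t in project.get("teams", {}).get("nodes", [])]
        if team_id in existing:
            return True  # already associated, no mutation needed
        mutation = """
            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
                projectUpdate(id: $id, input: $input) { success }
            }
        """
        # Merge rather than replace: teamIds overwrites the project's full team list.
        result = await client.execute_mutation(
            mutation, {"id": project_id, "input": {"teamIds": existing + [team_id]}}
        )
        return result.get("projectUpdate", {}).get("success", False)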
@@ -325,12 +905,13 @@ class LinearAdapter(BaseAdapter[Task]):
             self._workflow_states = workflow_states
 
         except Exception as e:
-            raise ValueError(f"Failed to load workflow states: {e}")
+            raise ValueError(f"Failed to load workflow states: {e}") from e
 
     async def _load_team_labels(self, team_id: str) -> None:
         """Load and cache labels for the team with retry logic.
 
         Args:
+        ----
             team_id: Linear team ID
 
         """
@@ -375,69 +956,342 @@ class LinearAdapter(BaseAdapter[Task]):
             )
             self._labels_cache = []  # Explicitly empty on failure
 
-    async def
-
+    async def _find_label_by_name(
+        self, name: str, team_id: str, max_retries: int = 3
+    ) -> dict | None:
+        """Find a label by name using Linear API (server-side check) with retry logic.
+
+        Handles cache staleness by checking Linear's server-side state.
+        This method is used when cache lookup misses to prevent duplicate
+        label creation attempts.
+
+        Implements retry logic with exponential backoff to handle transient
+        network failures and distinguish between "label not found" (None) and
+        "check failed" (exception).
 
         Args:
-
+        ----
+            name: Label name to search for (case-insensitive)
+            team_id: Linear team ID
+            max_retries: Maximum retry attempts for transient failures (default: 3)
 
         Returns:
-
+        -------
+            dict: Label data if found (with id, name, color, description)
+            None: Label definitively doesn't exist (checked successfully)
+
+        Raises:
+        ------
+            Exception: Unable to check label existence after retries exhausted
+                (network/API failure). Caller must handle to prevent
+                duplicate label creation.
+
+        Note:
+        ----
+            This method queries Linear's API and returns the first 250 labels.
+            For teams with >250 labels, pagination would be needed (future enhancement).
+
+        Related:
+        -------
+            1M-443: Fix duplicate label error when setting existing labels
+            1M-443 hotfix: Add retry logic to prevent ambiguous error handling
 
         """
         logger = logging.getLogger(__name__)
 
-
-
+        query = """
+            query GetTeamLabels($teamId: String!) {
+                team(id: $teamId) {
+                    labels(first: 250) {
+                        nodes {
+                            id
+                            name
+                            color
+                            description
+                        }
+                    }
+                }
+            }
+        """
+
+        for attempt in range(max_retries):
+            try:
+                result = await self.client.execute_query(query, {"teamId": team_id})
+                labels = result.get("team", {}).get("labels", {}).get("nodes", [])
+
+                # Case-insensitive search
+                name_lower = name.lower()
+                for label in labels:
+                    if label["name"].lower() == name_lower:
+                        logger.debug(
+                            f"Found label '{name}' via server-side search (ID: {label['id']})"
+                        )
+                        return label
+
+                # Label definitively doesn't exist (successful check)
+                logger.debug(f"Label '{name}' not found in {len(labels)} team labels")
+                return None
+
+            except Exception as e:
+                if attempt < max_retries - 1:
+                    # Transient failure, retry with exponential backoff
+                    wait_time = 2**attempt
+                    await asyncio.sleep(wait_time)
+                    logger.debug(
+                        f"Retry {attempt + 1}/{max_retries} for label '{name}' search: {e}"
+                    )
+                    continue
+                else:
+                    # All retries exhausted, propagate exception
+                    # CRITICAL: Caller must handle to prevent duplicate creation
+                    logger.error(
+                        f"Failed to check label '{name}' after {max_retries} attempts: {e}"
+                    )
+                    raise
+
+        # This should never be reached (all paths return/raise in loop)
+        return None
+
+    async def _create_label(
+        self, name: str, team_id: str, color: str = "#0366d6"
+    ) -> str:
+        """Create a new label in Linear.
+
+        Implements race condition recovery: if creation fails due to duplicate,
+        retry lookup from server (Tier 2) to get the existing label ID.
+
+        Related: 1M-398 - Label duplicate error handling
+
+        Args:
+        ----
+            name: Label name
+            team_id: Linear team ID
+            color: Label color (hex format, default: blue)
+
+        Returns:
+        -------
+            str: Label ID (either newly created or existing after recovery)
+
+        Raises:
+        ------
+            ValueError: If label creation fails and recovery lookup also fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        label_input = {
+            "name": name,
+            "teamId": team_id,
+            "color": color,
+        }
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_LABEL_MUTATION, {"input": label_input}
+            )
+
+            if not result["issueLabelCreate"]["success"]:
+                raise ValueError(f"Failed to create label '{name}'")
+
+            created_label = result["issueLabelCreate"]["issueLabel"]
+            label_id = created_label["id"]
+
+            # Update cache with new label
+            if self._labels_cache is not None:
+                self._labels_cache.append(created_label)
+
+            logger.info(f"Created new label '{name}' with ID: {label_id}")
+            return label_id
+
+        except Exception as e:
+            """
+            Race condition recovery: Another process may have created this label
+            between our Tier 2 lookup and creation attempt.
+
+            Graceful recovery:
+            1. Check if error is duplicate label error
+            2. Retry Tier 2 lookup (query server)
+            3. Return existing label ID if found
+            4. Raise error if recovery fails
+            """
+            error_str = str(e).lower()
+
+            # Check if this is a duplicate label error
+            if "duplicate" in error_str and "label" in error_str:
+                logger.debug(
+                    f"Duplicate label detected for '{name}', attempting recovery lookup"
+                )
+
+                # Retry Tier 2: Query server for existing label
+                server_label = await self._find_label_by_name(name, team_id)
+
+                if server_label:
+                    label_id = server_label["id"]
+
+                    # Update cache with recovered label
+                    if self._labels_cache is not None:
+                        self._labels_cache.append(server_label)
+
+                    logger.info(
+                        f"Successfully recovered from duplicate label error: '{name}' "
+                        f"(ID: {label_id})"
+                    )
+                    return label_id
+
+                # Recovery failed - label exists but we can't retrieve it
+                raise ValueError(
+                    f"Label '{name}' already exists but could not retrieve ID. "
+                    f"This may indicate a permissions issue or API inconsistency."
+                ) from e
+
+            # Not a duplicate error - re-raise original exception
+            logger.error(f"Failed to create label '{name}': {e}")
+            raise ValueError(f"Failed to create label '{name}': {e}") from e
+
+    async def _ensure_labels_exist(self, label_names: list[str]) -> list[str]:
+        """Ensure labels exist, creating them if necessary.
+
+        This method implements a three-tier label resolution flow to prevent
+        duplicate label creation errors:
+
+        1. **Tier 1 (Cache)**: Check local cache (fast, 0 API calls)
+        2. **Tier 2 (Server)**: Query Linear API for label (handles staleness, +1 API call)
+        3. **Tier 3 (Create)**: Create new label only if truly doesn't exist
+
+        The three-tier approach solves cache staleness issues where labels exist
+        in Linear but not in local cache, preventing "label already exists" errors.
+
+        Behavior (1M-396):
+        - Fail-fast: If any label creation fails, the exception is propagated
+        - All-or-nothing: Partial label updates are not allowed
+        - Clear errors: Callers receive actionable error messages
+
+        Performance:
+        - Cached labels: 0 additional API calls (Tier 1 hit)
+        - New labels: +1 API call for existence check (Tier 2) + 1 for creation (Tier 3)
+        - Trade-off: Accepts +1 API call to prevent duplicate errors
+
+        Args:
+        ----
+            label_names: List of label names (strings)
+
+        Returns:
+        -------
+            List of Linear label IDs (UUIDs)
+
+        Raises:
+        ------
+            ValueError: If any label creation fails
+
+        Related:
+        -------
+            1M-443: Fix duplicate label error when setting existing labels
+            1M-396: Fail-fast label creation behavior
+
+        """
+        logger = logging.getLogger(__name__)
+
+        if not label_names:
+            return []
+
+        # Ensure labels are loaded
+        if self._labels_cache is None:
             team_id = await self._ensure_team_id()
             await self._load_team_labels(team_id)
 
             if self._labels_cache is None:
-                # Still None after load attempt - should not happen
                 logger.error(
                     "Label cache is None after load attempt. Tags will be skipped."
                 )
                 return []
 
-
-
-            logger.warning(
-                f"Team has no labels available. Cannot resolve tags: {label_names}"
-            )
-            return []
+        # Get team ID for creating new labels
+        team_id = await self._ensure_team_id()
 
         # Create name -> ID mapping (case-insensitive)
-        label_map = {
+        label_map = {
+            label["name"].lower(): label["id"] for label in (self._labels_cache or [])
+        }
 
         logger.debug(f"Available labels in team: {list(label_map.keys())}")
 
-        #
+        # Map or create each label
         label_ids = []
-        unmatched_labels = []
-
         for name in label_names:
-
-
+            name_lower = name.lower()
+
+            # Tier 1: Check cache (fast path, 0 API calls)
+            if name_lower in label_map:
+                label_id = label_map[name_lower]
                 label_ids.append(label_id)
-                logger.debug(
-
-                unmatched_labels.append(name)
-                logger.warning(
-                    f"Label '{name}' not found in team. Available labels: {list(label_map.keys())}"
+                logger.debug(
+                    f"[Tier 1] Resolved cached label '{name}' to ID: {label_id}"
                 )
-
-
-
-
-
-
+            else:
+                # Tier 2: Check server for label (handles cache staleness)
+                try:
+                    server_label = await self._find_label_by_name(name, team_id)
+                except Exception as e:
+                    # Server check failed after retries (1M-443 hotfix)
+                    # CRITICAL: Do NOT proceed to creation to prevent duplicates
+                    # Re-raise to signal failure to verify label existence
+                    logger.error(
+                        f"Unable to verify label '{name}' existence. "
+                        f"Cannot safely create to avoid duplicates. Error: {e}"
+                    )
+                    raise ValueError(
+                        f"Unable to verify label '{name}' existence. "
+                        f"Cannot safely create to avoid duplicates. Error: {e}"
+                    ) from e
+
+                if server_label:
+                    # Label exists on server but not in cache - update cache
+                    label_id = server_label["id"]
+                    label_ids.append(label_id)
+                    label_map[name_lower] = label_id
+
+                    # Update cache to prevent future misses
+                    if self._labels_cache is not None:
+                        self._labels_cache.append(server_label)
+
+                    logger.info(
+                        f"[Tier 2] Found stale label '{name}' on server (ID: {label_id}), "
+                        "updated cache"
+                    )
+                else:
+                    # Tier 3: Label truly doesn't exist - create it
+                    # Propagate exceptions for fail-fast behavior (1M-396)
+                    new_label_id = await self._create_label(name, team_id)
+                    label_ids.append(new_label_id)
+                    # Update local map for subsequent labels in same call
+                    label_map[name_lower] = new_label_id
+                    logger.info(
+                        f"[Tier 3] Created new label '{name}' with ID: {new_label_id}"
+                    )
 
         return label_ids
 
+    async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
+        """Resolve label names to Linear label IDs, creating labels if needed.
+
+        This method wraps _ensure_labels_exist for backward compatibility.
+
+        Args:
+        ----
+            label_names: List of label names
+
+        Returns:
+        -------
+            List of Linear label IDs
+
+        """
+        return await self._ensure_labels_exist(label_names)
+
     def _get_state_mapping(self) -> dict[TicketState, str]:
         """Get mapping from universal states to Linear workflow state IDs.
 
         Returns:
+        -------
             Dictionary mapping TicketState to Linear state ID
 
         """
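The retry loop inside _find_label_by_name above is the load-bearing piece: sleeping 2**attempt seconds between tries and re-raising only after the last attempt keeps "label not found" (a normal None return) distinguishable from "check failed" (an exception). The same pattern as a reusable sketch (the helper name and signature are ours):

    import asyncio
    from typing import Any, Awaitable, Callable

    async def with_backoff(
        op: Callable[[], Awaitable[Any]],
        max_retries: int = 3,
    ) -> Any:
        """Retry an async operation with 1s/2s/4s... exponential backoff.

        Mirrors the loop above: transient failures are retried; once retries
        are exhausted the last error propagates so the caller can fail fast.
        """
        for attempt in range(max_retries):
            try:
                return await op()
            except Exception:
                if attempt == max_retries - 1:
                    raise  # exhausted - let the caller handle it
                await asyncio.sleep(2 ** attempt)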
@@ -466,23 +1320,50 @@ class LinearAdapter(BaseAdapter[Task]):
         return mapping
 
     async def _get_user_id(self, user_identifier: str) -> str | None:
-        """Get Linear user ID from email or
+        """Get Linear user ID from email, display name, or user ID.
 
         Args:
-
+        ----
+            user_identifier: Email, display name, or user ID
 
         Returns:
+        -------
             Linear user ID or None if not found
 
         """
-
+        if not user_identifier:
+            return None
+
+        # Try email lookup first (most specific)
         user = await self.client.get_user_by_email(user_identifier)
         if user:
             return user["id"]
 
-        #
-
-
+        # Try name search (displayName or full name)
+        users = await self.client.get_users_by_name(user_identifier)
+        if users:
+            if len(users) == 1:
+                # Exact match found
+                return users[0]["id"]
+            else:
+                # Multiple matches - try exact match
+                for u in users:
+                    if (
+                        u.get("displayName", "").lower() == user_identifier.lower()
+                        or u.get("name", "").lower() == user_identifier.lower()
+                    ):
+                        return u["id"]
+
+                # No exact match - log ambiguity and return first
+                logging.getLogger(__name__).warning(
+                    f"Multiple users match '{user_identifier}': "
+                    f"{[u.get('displayName', u.get('name')) for u in users]}. "
+                    f"Using first match: {users[0].get('displayName')}"
+                )
+                return users[0]["id"]
+
+        # Assume it's already a user ID
+        return user_identifier
 
     # CRUD Operations
 
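_get_user_id above resolves in a fixed order: email lookup, then name search preferring exact displayName/name matches, then falling through to treating the input as an ID. The disambiguation step alone, as a pure-function sketch over an already-fetched candidate list (function name is ours):

    from typing import Any

    def pick_user(candidates: list[dict[str, Any]], identifier: str) -> dict[str, Any] | None:
        """Disambiguation step from _get_user_id above: prefer an exact
        displayName/name match, otherwise fall back to the first candidate."""
        if not candidates:
            return None
        if len(candidates) == 1:
            return candidates[0]
        wanted = identifier.lower()
        for user in candidates:
            if (
                user.get("displayName", "").lower() == wanted
                or user.get("name", "").lower() == wanted
            ):
                return user
        return candidates[0]  # ambiguous - same first-match behavior as the diff

    users = [{"displayName": "Bob A"}, {"displayName": "bob"}]
    assert pick_user(users, "Bob")["displayName"] == "bob"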
@@ -490,12 +1371,15 @@ class LinearAdapter(BaseAdapter[Task]):
         """Create a new Linear issue or project with full field support.
 
         Args:
+        ----
             ticket: Epic or Task to create
 
         Returns:
+        -------
             Created ticket with populated ID and metadata
 
         Raises:
+        ------
             ValueError: If credentials are invalid or creation fails
 
         """
@@ -515,15 +1399,24 @@ class LinearAdapter(BaseAdapter[Task]):
             return await self._create_task(ticket)
 
     async def _create_task(self, task: Task) -> Task:
-        """Create a Linear issue from a Task.
+        """Create a Linear issue or sub-issue from a Task.
+
+        Creates a top-level issue when task.parent_issue is not set, or a
+        sub-issue (child of another issue) when task.parent_issue is provided.
+        In Linear terminology:
+        - Issue: Top-level work item (no parent)
+        - Sub-issue: Child work item (has parent issue)
 
         Args:
+        ----
             task: Task to create
 
         Returns:
+        -------
             Created task with Linear metadata
 
         """
+        logger = logging.getLogger(__name__)
         team_id = await self._ensure_team_id()
 
         # Build issue input using mapper
@@ -537,8 +1430,14 @@ class LinearAdapter(BaseAdapter[Task]):
             issue_input["stateId"] = state_mapping[TicketState.OPEN]
 
         # Resolve assignee to user ID if provided
-        if
-
+        # Use configured default user if no assignee specified
+        assignee = task.assignee
+        if not assignee and self.user_email:
+            assignee = self.user_email
+            logger.debug(f"Using default assignee from config: {assignee}")
+
+        if assignee:
+            user_id = await self._get_user_id(assignee)
             if user_id:
                 issue_input["assigneeId"] = user_id
 
@@ -555,7 +1454,35 @@ class LinearAdapter(BaseAdapter[Task]):
         if task.parent_epic:
             project_id = await self._resolve_project_id(task.parent_epic)
             if project_id:
-
+                # Validate team-project association before assigning
+                is_valid, _ = await self._validate_project_team_association(
+                    project_id, team_id
+                )
+
+                if not is_valid:
+                    # Attempt to add team to project automatically
+                    logging.getLogger(__name__).info(
+                        f"Team {team_id} not associated with project {project_id}. "
+                        f"Attempting to add team to project..."
+                    )
+                    success = await self._ensure_team_in_project(project_id, team_id)
+
+                    if success:
+                        issue_input["projectId"] = project_id
+                        logging.getLogger(__name__).info(
+                            "Successfully associated team with project. "
+                            "Issue will be assigned to project."
+                        )
+                    else:
+                        logging.getLogger(__name__).warning(
+                            "Could not associate team with project. "
+                            "Issue will be created without project assignment. "
+                            "Manual assignment required."
+                        )
+                        issue_input.pop("projectId", None)
+                else:
+                    # Team already associated - safe to assign
+                    issue_input["projectId"] = project_id
             else:
                 # Log warning but don't fail - user may have provided invalid project
                 logging.getLogger(__name__).warning(
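
The guard added above degrades gracefully: validate the team/project pairing, try to repair it, and only then fall back to creating the issue without a project. A reduced sketch of that decision ladder, assuming the two private helpers behave as their names suggest:

    # Illustrative only: "adapter" and its helpers stand in for the real methods above.
    async def maybe_assign_project(adapter, issue_input: dict, project_id: str, team_id: str) -> None:
        is_valid, _ = await adapter._validate_project_team_association(project_id, team_id)
        if is_valid or await adapter._ensure_team_in_project(project_id, team_id):
            issue_input["projectId"] = project_id      # safe to assign
        else:
            issue_input.pop("projectId", None)         # create without a project
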
@@ -565,27 +1492,68 @@ class LinearAdapter(BaseAdapter[Task]):
                 # Remove projectId if we couldn't resolve it
                 issue_input.pop("projectId", None)
 
+        # Resolve parent issue ID if provided (creates a sub-issue when parent is set)
+        # Supports identifiers like "ENG-842" or UUIDs
+        if task.parent_issue:
+            issue_id = await self._resolve_issue_id(task.parent_issue)
+            if issue_id:
+                issue_input["parentId"] = issue_id
+            else:
+                # Log warning but don't fail - user may have provided invalid issue
+                logging.getLogger(__name__).warning(
+                    f"Could not resolve issue identifier '{task.parent_issue}' to UUID. "
+                    "Sub-issue will be created without parent assignment."
+                )
+                # Remove parentId if we couldn't resolve it
+                issue_input.pop("parentId", None)
+
+        # Validate labelIds are proper UUIDs before sending to Linear API
+        # Bug Fix (v1.1.1): This validation prevents "Argument Validation Error"
+        # by ensuring labelIds contains UUIDs (e.g., "uuid-1"), not names (e.g., "bug").
+        # Linear's GraphQL API requires labelIds to be [String!]! (non-null array of
+        # non-null UUID strings). If tag names leak through, we detect and remove them
+        # here to prevent API errors.
+        #
+        # See: docs/TROUBLESHOOTING.md#issue-argument-validation-error-when-creating-issues-with-labels
+        if "labelIds" in issue_input:
+            invalid_labels = []
+            for label_id in issue_input["labelIds"]:
+                # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                if not isinstance(label_id, str) or len(label_id) != 36:
+                    invalid_labels.append(label_id)
+
+            if invalid_labels:
+                logging.getLogger(__name__).error(
+                    f"Invalid label ID format detected: {invalid_labels}. "
+                    f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                )
+                issue_input.pop("labelIds")
+
         try:
             result = await self.client.execute_mutation(
                 CREATE_ISSUE_MUTATION, {"input": issue_input}
             )
 
             if not result["issueCreate"]["success"]:
-
+                item_type = "sub-issue" if task.parent_issue else "issue"
+                raise ValueError(f"Failed to create Linear {item_type}")
 
             created_issue = result["issueCreate"]["issue"]
             return map_linear_issue_to_task(created_issue)
 
         except Exception as e:
-
+            item_type = "sub-issue" if task.parent_issue else "issue"
+            raise ValueError(f"Failed to create Linear {item_type}: {e}") from e
 
     async def _create_epic(self, epic: Epic) -> Epic:
         """Create a Linear project from an Epic.
 
         Args:
+        ----
             epic: Epic to create
 
         Returns:
+        -------
             Created epic with Linear metadata
 
         """
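
The label guard relies on a cheap 36-character length test rather than full UUID parsing. A stricter check is possible; this sketch shows both, with uuid.UUID as an optional tightening that the diff itself does not use:

    import uuid

    def is_linear_label_uuid(value: object, strict: bool = False) -> bool:
        # Linear UUIDs are 36 chars in 8-4-4-4-12 format.
        if not isinstance(value, str) or len(value) != 36:
            return False
        if not strict:
            return True
        try:
            uuid.UUID(value)
            return True
        except ValueError:
            return False

    assert is_linear_label_uuid("12345678-1234-1234-1234-123456789012")
    assert not is_linear_label_uuid("bug")  # a label *name*, which must not reach the API
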
@@ -642,16 +1610,134 @@ class LinearAdapter(BaseAdapter[Task]):
             return map_linear_project_to_epic(created_project)
 
         except Exception as e:
-            raise ValueError(f"Failed to create Linear project: {e}")
+            raise ValueError(f"Failed to create Linear project: {e}") from e
+
+    async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
+        """Update a Linear project (Epic) with specified fields.
+
+        Args:
+        ----
+            epic_id: Linear project UUID or slug-shortid
+            updates: Dictionary of fields to update. Supported fields:
+                - title: Project name
+                - description: Project description
+                - state: Project state (e.g., "planned", "started", "completed", "canceled")
+                - target_date: Target completion date (ISO format YYYY-MM-DD)
+                - color: Project color
+                - icon: Project icon
+
+        Returns:
+        -------
+            Updated Epic object or None if not found
+
+        Raises:
+        ------
+            ValueError: If update fails or project not found
+
+        """
+        # Validate credentials before attempting operation
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(epic_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{epic_id}' not found")
+
+        # Validate field lengths before building update input
+        from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
+        # Build update input from updates dict
+        update_input = {}
+
+        if "title" in updates:
+            try:
+                validated_title = FieldValidator.validate_field(
+                    "linear", "epic_name", updates["title"], truncate=False
+                )
+                update_input["name"] = validated_title
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
+
+        if "description" in updates:
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", updates["description"], truncate=False
+                )
+                update_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
+        if "state" in updates:
+            update_input["state"] = updates["state"]
+        if "target_date" in updates:
+            update_input["targetDate"] = updates["target_date"]
+        if "color" in updates:
+            update_input["color"] = updates["color"]
+        if "icon" in updates:
+            update_input["icon"] = updates["icon"]
+
+        # ProjectUpdate mutation
+        update_query = """
+            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+                projectUpdate(id: $id, input: $input) {
+                    success
+                    project {
+                        id
+                        name
+                        description
+                        state
+                        createdAt
+                        updatedAt
+                        url
+                        icon
+                        color
+                        targetDate
+                        startedAt
+                        completedAt
+                        teams {
+                            nodes {
+                                id
+                                name
+                                key
+                                description
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                update_query, {"id": project_uuid, "input": update_input}
+            )
+
+            if not result["projectUpdate"]["success"]:
+                raise ValueError(f"Failed to update Linear project '{epic_id}'")
+
+            updated_project = result["projectUpdate"]["project"]
+            return map_linear_project_to_epic(updated_project)
+
+        except Exception as e:
+            raise ValueError(f"Failed to update Linear project: {e}") from e
 
-    async def read(self, ticket_id: str) -> Task | None:
-        """Read a Linear issue by identifier with full details.
+    async def read(self, ticket_id: str) -> Task | Epic | None:
+        """Read a Linear issue OR project by identifier with full details.
 
         Args:
-
+        ----
+            ticket_id: Linear issue identifier (e.g., 'BTA-123') or project UUID
 
         Returns:
-
+        -------
+            Task with full details if issue found,
+            Epic with full details if project found,
+            None if not found
+
+        Raises:
+        ------
+            ValueError: If ticket_id is a view URL (views are not supported in ticket_read)
 
         """
         # Validate credentials before attempting operation
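
A hypothetical call site for the new update_epic(); the project identifier and field values here are invented for illustration, but the accepted keys match the docstring above:

    updated = await adapter.update_epic(
        "platform-revamp-abc123",
        {
            "title": "Platform Revamp",
            "state": "started",
            "target_date": "2025-09-30",
        },
    )
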
@@ -659,6 +1745,7 @@ class LinearAdapter(BaseAdapter[Task]):
         if not is_valid:
             raise ValueError(error_message)
 
+        # Try reading as an issue first (most common case)
         query = (
             ALL_FRAGMENTS
             + """
@@ -676,20 +1763,88 @@ class LinearAdapter(BaseAdapter[Task]):
             if result.get("issue"):
                 return map_linear_issue_to_task(result["issue"])
 
-        except
-            #
+        except Exception:
+            # Not found as issue, continue to project/view check
+            pass
+
+        # If not found as issue, try reading as project
+        try:
+            project_data = await self.get_project(ticket_id)
+            if project_data:
+                # Fetch project's issues to populate child_issues field
+                issues = await self._get_project_issues(ticket_id)
+
+                # Map to Epic
+                epic = map_linear_project_to_epic(project_data)
+
+                # Populate child_issues with issue IDs
+                epic.child_issues = [issue.id for issue in issues]
+
+                return epic
+        except Exception:
+            # Not found as project either
+            pass
+
+        # If not found as issue or project, check if it's a view URL
+        # Views are collections of issues, not individual tickets
+        logging.debug(
+            f"[VIEW DEBUG] read() checking if ticket_id is a view: {ticket_id}"
+        )
+        try:
+            view_data = await self._get_custom_view(ticket_id)
+            logging.debug(f"[VIEW DEBUG] read() _get_custom_view returned: {view_data}")
+
+            if view_data:
+                logging.debug(
+                    "[VIEW DEBUG] read() view_data is truthy, preparing to raise ValueError"
+                )
+                # View found - raise informative error
+                view_name = view_data.get("name", "Unknown")
+                issues_data = view_data.get("issues", {})
+                issue_count = len(issues_data.get("nodes", []))
+                has_more = issues_data.get("pageInfo", {}).get("hasNextPage", False)
+                count_str = f"{issue_count}+" if has_more else str(issue_count)
+
+                logging.debug(
+                    f"[VIEW DEBUG] read() raising ValueError with view_name={view_name}, count={count_str}"
+                )
+                raise ValueError(
+                    f"Linear view URLs are not supported in ticket_read.\n"
+                    f"\n"
+                    f"View: '{view_name}' ({ticket_id})\n"
+                    f"This view contains {count_str} issues.\n"
+                    f"\n"
+                    f"Use ticket_list or ticket_search to query issues instead."
+                )
+            else:
+                logging.debug("[VIEW DEBUG] read() view_data is falsy (None or empty)")
+        except ValueError:
+            # Re-raise ValueError (our informative error message)
+            logging.debug("[VIEW DEBUG] read() re-raising ValueError")
+            raise
+        except Exception as e:
+            # View query failed - not a view
+            logging.debug(
+                f"[VIEW DEBUG] read() caught exception in view check: {type(e).__name__}: {str(e)}"
+            )
             pass
 
+        # Not found as either issue, project, or view
+        logging.debug(
+            "[VIEW DEBUG] read() returning None - not found as issue, project, or view"
+        )
         return None
 
     async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
         """Update a Linear issue with comprehensive field support.
 
         Args:
+        ----
            ticket_id: Linear issue identifier
            updates: Dictionary of fields to update
 
         Returns:
+        -------
            Updated task or None if not found
 
         """
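
Because read() can now return a Task, an Epic, or None, callers should branch on the result type. A sketch with an illustrative identifier:

    result = await adapter.read("BTA-123")
    if result is None:
        print("not found as issue, project, or view")
    elif isinstance(result, Epic):
        print(f"project with {len(result.child_issues)} child issues")
    else:
        print(f"found issue: {result.id}")
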
@@ -698,6 +1853,9 @@ class LinearAdapter(BaseAdapter[Task]):
         if not is_valid:
             raise ValueError(error_message)
 
+        # Ensure adapter is initialized (loads workflow states for state transitions)
+        await self.initialize()
+
         # First get the Linear internal ID
         id_query = """
             query GetIssueId($identifier: String!) {
@@ -738,10 +1896,47 @@ class LinearAdapter(BaseAdapter[Task]):
                 update_input["assigneeId"] = user_id
 
         # Resolve label names to IDs if provided
-        if "tags" in updates
-
-
-
+        if "tags" in updates:
+            if updates["tags"]:  # Non-empty list
+                try:
+                    label_ids = await self._resolve_label_ids(updates["tags"])
+                    if label_ids:
+                        update_input["labelIds"] = label_ids
+                except ValueError as e:
+                    # Label creation failed - provide clear error message (1M-396)
+                    raise ValueError(
+                        f"Failed to update labels for issue {ticket_id}. "
+                        f"Label creation error: {e}. "
+                        f"Tip: Use the 'label_list' tool to check existing labels, "
+                        f"or verify you have permissions to create new labels."
+                    ) from e
+            else:  # Empty list = remove all labels
+                update_input["labelIds"] = []
+
+        # Resolve project ID if parent_epic is provided (supports slug, name, short ID, or URL)
+        if "parent_epic" in updates and updates["parent_epic"]:
+            project_id = await self._resolve_project_id(updates["parent_epic"])
+            if project_id:
+                update_input["projectId"] = project_id
+            else:
+                logging.getLogger(__name__).warning(
+                    f"Could not resolve project identifier '{updates['parent_epic']}'"
+                )
+
+        # Validate labelIds are proper UUIDs before sending to Linear API
+        if "labelIds" in update_input and update_input["labelIds"]:
+            invalid_labels = []
+            for label_id in update_input["labelIds"]:
+                # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                if not isinstance(label_id, str) or len(label_id) != 36:
+                    invalid_labels.append(label_id)
+
+            if invalid_labels:
+                logging.getLogger(__name__).error(
+                    f"Invalid label ID format detected in update: {invalid_labels}. "
+                    f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                )
+                update_input.pop("labelIds")
 
         # Execute update
         result = await self.client.execute_mutation(
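
Tag semantics in update() are now explicit: a non-empty list resolves names to label UUIDs (creating labels where permitted), while an empty list clears all labels. Illustrative calls:

    await adapter.update("BTA-123", {"tags": ["bug", "backend"]})  # set/replace labels
    await adapter.update("BTA-123", {"tags": []})                  # remove all labels
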
@@ -755,15 +1950,17 @@ class LinearAdapter(BaseAdapter[Task]):
             return map_linear_issue_to_task(updated_issue)
 
         except Exception as e:
-            raise ValueError(f"Failed to update Linear issue: {e}")
+            raise ValueError(f"Failed to update Linear issue: {e}") from e
 
     async def delete(self, ticket_id: str) -> bool:
         """Delete a Linear issue (archive it).
 
         Args:
+        ----
            ticket_id: Linear issue identifier
 
         Returns:
+        -------
            True if successfully deleted/archived
 
         """
@@ -780,11 +1977,13 @@ class LinearAdapter(BaseAdapter[Task]):
         """List Linear issues with optional filtering.
 
         Args:
+        ----
            limit: Maximum number of issues to return
            offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
            filters: Optional filters (state, assignee, priority, etc.)
 
         Returns:
+        -------
            List of tasks matching the criteria
 
         """
@@ -813,6 +2012,12 @@ class LinearAdapter(BaseAdapter[Task]):
                 if user_id:
                     issue_filter["assignee"] = {"id": {"eq": user_id}}
 
+            # Support parent_issue filter for listing children (critical for parent state constraints)
+            if "parent_issue" in filters:
+                parent_id = await self._resolve_issue_id(filters["parent_issue"])
+                if parent_id:
+                    issue_filter["parent"] = {"id": {"eq": parent_id}}
+
             if "created_after" in filters:
                 issue_filter["createdAt"] = {"gte": filters["created_after"]}
             if "updated_after" in filters:
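
With the parent filter in place, listing the children of an issue is a one-liner; "ENG-842" is an illustrative identifier and the keyword shape follows the method's documented signature:

    children = await adapter.list(limit=50, filters={"parent_issue": "ENG-842"})
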
@@ -832,15 +2037,17 @@ class LinearAdapter(BaseAdapter[Task]):
             return tasks
 
         except Exception as e:
-            raise ValueError(f"Failed to list Linear issues: {e}")
+            raise ValueError(f"Failed to list Linear issues: {e}") from e
 
     async def search(self, query: SearchQuery) -> builtins.list[Task]:
         """Search Linear issues using comprehensive filters.
 
         Args:
+        ----
            query: Search query with filters and criteria
 
         Returns:
+        -------
            List of tasks matching the search criteria
 
         """
@@ -862,9 +2069,15 @@ class LinearAdapter(BaseAdapter[Task]):
             issue_filter["title"] = {"containsIgnoreCase": query.query}
 
         # State filter
+        # Bug fix: Handle OPEN state specially to include both unstarted AND backlog
+        # tickets, as both Linear states map to TicketState.OPEN
         if query.state:
-
-
+            if query.state == TicketState.OPEN:
+                # Include both "unstarted" and "backlog" states for OPEN
+                issue_filter["state"] = {"type": {"in": ["unstarted", "backlog"]}}
+            else:
+                state_type = get_linear_state_type(query.state)
+                issue_filter["state"] = {"type": {"eq": state_type}}
 
         # Priority filter
         if query.priority:
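
The OPEN special case swaps an equality filter for a set-membership filter, since two distinct Linear state types map onto one universal state. Restated on its own (query_state stands in for query.state):

    if query_state == TicketState.OPEN:
        state_filter = {"type": {"in": ["unstarted", "backlog"]}}  # both map to OPEN
    else:
        state_filter = {"type": {"eq": get_linear_state_type(query_state)}}
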
@@ -877,6 +2090,13 @@ class LinearAdapter(BaseAdapter[Task]):
             if user_id:
                 issue_filter["assignee"] = {"id": {"eq": user_id}}
 
+        # Project filter (Bug fix: Add support for filtering by project/epic)
+        if query.project:
+            # Resolve project ID (supports ID, name, or URL)
+            project_id = await self._resolve_project_id(query.project)
+            if project_id:
+                issue_filter["project"] = {"id": {"eq": project_id}}
+
         # Tags filter (labels in Linear)
         if query.tags:
             issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}
@@ -896,7 +2116,7 @@ class LinearAdapter(BaseAdapter[Task]):
             return tasks
 
         except Exception as e:
-            raise ValueError(f"Failed to search Linear issues: {e}")
+            raise ValueError(f"Failed to search Linear issues: {e}") from e
 
     async def transition_state(
         self, ticket_id: str, target_state: TicketState
@@ -904,10 +2124,12 @@ class LinearAdapter(BaseAdapter[Task]):
         """Transition Linear issue to new state with workflow validation.
 
         Args:
+        ----
            ticket_id: Linear issue identifier
            target_state: Target state to transition to
 
         Returns:
+        -------
            Updated task or None if transition failed
 
         """
@@ -923,25 +2145,36 @@ class LinearAdapter(BaseAdapter[Task]):
     ) -> bool:
         """Validate if state transition is allowed.
 
+        Delegates to BaseAdapter for:
+        - Workflow state machine validation
+        - Parent/child state constraint validation (from 1M-93 requirement)
+
+        The BaseAdapter implementation (core/adapter.py lines 312-370) ensures:
+        1. Valid workflow state transitions (OPEN → IN_PROGRESS → READY → etc.)
+        2. Parent issues maintain completion level ≥ max child completion level
+
         Args:
+        ----
            ticket_id: Linear issue identifier
            target_state: Target state to validate
 
         Returns:
-
+        -------
+           True if transition is valid, False otherwise
 
         """
-        #
-
-        return True
+        # Call parent implementation for all validation logic
+        return await super().validate_transition(ticket_id, target_state)
 
     async def add_comment(self, comment: Comment) -> Comment:
         """Add a comment to a Linear issue.
 
         Args:
+        ----
            comment: Comment to add
 
         Returns:
+        -------
            Created comment with ID
 
         """
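
The override is intentionally a pure delegation; it exists so the Linear adapter no longer short-circuits validation with "return True". The minimal shape of such an override, for reference (the class name is illustrative):

    class SomeAdapter(BaseAdapter):
        async def validate_transition(self, ticket_id: str, target_state: TicketState) -> bool:
            # Workflow and parent/child checks all live in BaseAdapter.
            return await super().validate_transition(ticket_id, target_state)
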
@@ -1001,7 +2234,7 @@ class LinearAdapter(BaseAdapter[Task]):
             return map_linear_comment_to_comment(created_comment, comment.ticket_id)
 
         except Exception as e:
-            raise ValueError(f"Failed to add comment: {e}")
+            raise ValueError(f"Failed to add comment: {e}") from e
 
     async def get_comments(
         self, ticket_id: str, limit: int = 10, offset: int = 0
@@ -1009,11 +2242,13 @@ class LinearAdapter(BaseAdapter[Task]):
         """Get comments for a Linear issue.
 
         Args:
+        ----
            ticket_id: Linear issue identifier
            limit: Maximum number of comments to return
            offset: Number of comments to skip
 
         Returns:
+        -------
            List of comments for the issue
 
         """
@@ -1059,6 +2294,956 @@ class LinearAdapter(BaseAdapter[Task]):
         except Exception:
             return []
 
+    async def list_labels(self) -> builtins.list[dict[str, Any]]:
+        """List all labels available in the Linear team.
+
+        Returns:
+        -------
+            List of label dictionaries with 'id', 'name', and 'color' fields
+
+        """
+        # Ensure labels are loaded
+        if self._labels_cache is None:
+            team_id = await self._ensure_team_id()
+            await self._load_team_labels(team_id)
+
+        # Return cached labels or empty list if not available
+        if not self._labels_cache:
+            return []
+
+        # Transform to standardized format
+        return [
+            {
+                "id": label["id"],
+                "name": label["name"],
+                "color": label.get("color", ""),
+            }
+            for label in self._labels_cache
+        ]
+
+    async def upload_file(self, file_path: str, mime_type: str | None = None) -> str:
+        """Upload a file to Linear's storage and return the asset URL.
+
+        This method implements Linear's three-step file upload process:
+        1. Request a pre-signed upload URL via fileUpload mutation
+        2. Upload the file to S3 using the pre-signed URL
+        3. Return the asset URL for use in attachments
+
+        Args:
+        ----
+            file_path: Path to the file to upload
+            mime_type: MIME type of the file. If None, will be auto-detected.
+
+        Returns:
+        -------
+            Asset URL that can be used with attachmentCreate mutation
+
+        Raises:
+        ------
+            ValueError: If file doesn't exist, upload fails, or httpx not available
+            FileNotFoundError: If the specified file doesn't exist
+
+        """
+        if httpx is None:
+            raise ValueError(
+                "httpx library not installed. Install with: pip install httpx"
+            )
+
+        # Validate file exists
+        file_path_obj = Path(file_path)
+        if not file_path_obj.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+        if not file_path_obj.is_file():
+            raise ValueError(f"Path is not a file: {file_path}")
+
+        # Get file info
+        file_size = file_path_obj.stat().st_size
+        filename = file_path_obj.name
+
+        # Auto-detect MIME type if not provided
+        if mime_type is None:
+            mime_type, _ = mimetypes.guess_type(file_path)
+            if mime_type is None:
+                # Default to binary if can't detect
+                mime_type = "application/octet-stream"
+
+        # Step 1: Request pre-signed upload URL
+        upload_mutation = """
+            mutation FileUpload($contentType: String!, $filename: String!, $size: Int!) {
+                fileUpload(contentType: $contentType, filename: $filename, size: $size) {
+                    success
+                    uploadFile {
+                        uploadUrl
+                        assetUrl
+                        headers {
+                            key
+                            value
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                upload_mutation,
+                {
+                    "contentType": mime_type,
+                    "filename": filename,
+                    "size": file_size,
+                },
+            )
+
+            if not result["fileUpload"]["success"]:
+                raise ValueError("Failed to get upload URL from Linear API")
+
+            upload_file_data = result["fileUpload"]["uploadFile"]
+            upload_url = upload_file_data["uploadUrl"]
+            asset_url = upload_file_data["assetUrl"]
+            headers_list = upload_file_data.get("headers", [])
+
+            # Convert headers list to dict
+            upload_headers = {h["key"]: h["value"] for h in headers_list}
+            # Add Content-Type header
+            upload_headers["Content-Type"] = mime_type
+
+            # Step 2: Upload file to S3 using pre-signed URL
+            async with httpx.AsyncClient() as http_client:
+                with open(file_path, "rb") as f:
+                    file_content = f.read()
+
+                response = await http_client.put(
+                    upload_url,
+                    content=file_content,
+                    headers=upload_headers,
+                    timeout=60.0,  # 60 second timeout for large files
+                )
+
+                if response.status_code not in (200, 201, 204):
+                    raise ValueError(
+                        f"Failed to upload file to S3. Status: {response.status_code}, "
+                        f"Response: {response.text}"
+                    )
+
+            # Step 3: Return asset URL
+            logging.getLogger(__name__).info(
+                f"Successfully uploaded file '{filename}' ({file_size} bytes) to Linear"
+            )
+            return asset_url
+
+        except Exception as e:
+            raise ValueError(f"Failed to upload file '{filename}': {e}") from e
+
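
End-to-end, upload_file() composes with attach_file_to_issue() (defined next); the file path and issue identifier below are placeholders:

    asset_url = await adapter.upload_file("./design-mockup.png")   # steps 1-2
    await adapter.attach_file_to_issue(                            # consumes step 3's URL
        issue_id="ENG-842",
        file_url=asset_url,
        title="Design mockup",
        comment_body="Latest mockup attached for review.",
    )
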
+    async def attach_file_to_issue(
+        self,
+        issue_id: str,
+        file_url: str,
+        title: str,
+        subtitle: str | None = None,
+        comment_body: str | None = None,
+    ) -> dict[str, Any]:
+        """Attach a file to a Linear issue.
+
+        The file must already be uploaded using upload_file() or be a publicly
+        accessible URL.
+
+        Args:
+        ----
+            issue_id: Linear issue identifier (e.g., "ENG-842") or UUID
+            file_url: URL of the file (from upload_file() or external URL)
+            title: Title for the attachment
+            subtitle: Optional subtitle for the attachment
+            comment_body: Optional comment text to include with the attachment
+
+        Returns:
+        -------
+            Dictionary with attachment details including id, title, url, etc.
+
+        Raises:
+        ------
+            ValueError: If attachment creation fails or issue not found
+
+        """
+        # Resolve issue identifier to UUID
+        issue_uuid = await self._resolve_issue_id(issue_id)
+        if not issue_uuid:
+            raise ValueError(f"Issue '{issue_id}' not found")
+
+        # Build attachment input
+        attachment_input: dict[str, Any] = {
+            "issueId": issue_uuid,
+            "title": title,
+            "url": file_url,
+        }
+
+        if subtitle:
+            attachment_input["subtitle"] = subtitle
+
+        if comment_body:
+            attachment_input["commentBody"] = comment_body
+
+        # Create attachment mutation
+        attachment_mutation = """
+            mutation AttachmentCreate($input: AttachmentCreateInput!) {
+                attachmentCreate(input: $input) {
+                    success
+                    attachment {
+                        id
+                        title
+                        url
+                        subtitle
+                        metadata
+                        createdAt
+                        updatedAt
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                attachment_mutation, {"input": attachment_input}
+            )
+
+            if not result["attachmentCreate"]["success"]:
+                raise ValueError(f"Failed to attach file to issue '{issue_id}'")
+
+            attachment = result["attachmentCreate"]["attachment"]
+            logging.getLogger(__name__).info(
+                f"Successfully attached file '{title}' to issue '{issue_id}'"
+            )
+            return attachment
+
+        except Exception as e:
+            raise ValueError(f"Failed to attach file to issue '{issue_id}': {e}") from e
+
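
The issue variant above and the epic variant below build the same AttachmentCreateInput except for the target key. That difference, isolated (all values are placeholders):

    def build_attachment_input(target_id: str, *, is_project: bool, title: str, url: str) -> dict:
        key = "projectId" if is_project else "issueId"
        return {key: target_id, "title": title, "url": url}

    assert "projectId" in build_attachment_input("uuid", is_project=True, title="Spec", url="https://example.com/f")
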
+    async def attach_file_to_epic(
+        self,
+        epic_id: str,
+        file_url: str,
+        title: str,
+        subtitle: str | None = None,
+    ) -> dict[str, Any]:
+        """Attach a file to a Linear project (Epic).
+
+        The file must already be uploaded using upload_file() or be a publicly
+        accessible URL.
+
+        Args:
+        ----
+            epic_id: Linear project UUID or slug-shortid
+            file_url: URL of the file (from upload_file() or external URL)
+            title: Title for the attachment
+            subtitle: Optional subtitle for the attachment
+
+        Returns:
+        -------
+            Dictionary with attachment details including id, title, url, etc.
+
+        Raises:
+        ------
+            ValueError: If attachment creation fails or project not found
+
+        """
+        # Resolve project identifier to UUID
+        project_uuid = await self._resolve_project_id(epic_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{epic_id}' not found")
+
+        # Build attachment input (use projectId instead of issueId)
+        attachment_input: dict[str, Any] = {
+            "projectId": project_uuid,
+            "title": title,
+            "url": file_url,
+        }
+
+        if subtitle:
+            attachment_input["subtitle"] = subtitle
+
+        # Create attachment mutation (same as for issues)
+        attachment_mutation = """
+            mutation AttachmentCreate($input: AttachmentCreateInput!) {
+                attachmentCreate(input: $input) {
+                    success
+                    attachment {
+                        id
+                        title
+                        url
+                        subtitle
+                        metadata
+                        createdAt
+                        updatedAt
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                attachment_mutation, {"input": attachment_input}
+            )
+
+            if not result["attachmentCreate"]["success"]:
+                raise ValueError(f"Failed to attach file to project '{epic_id}'")
+
+            attachment = result["attachmentCreate"]["attachment"]
+            logging.getLogger(__name__).info(
+                f"Successfully attached file '{title}' to project '{epic_id}'"
+            )
+            return attachment
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to attach file to project '{epic_id}': {e}"
+            ) from e
+
+    async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
+        """Get all attachments for a Linear issue or project.
+
+        This method retrieves attachment metadata from Linear's GraphQL API.
+        Note that Linear attachment URLs require authentication to access.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier (e.g., "ENG-842") or project UUID
+
+        Returns:
+        -------
+            List of Attachment objects with metadata
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid
+
+        Authentication Note:
+        -------------------
+            Linear attachment URLs require authentication headers:
+            Authorization: Bearer {api_key}
+
+            URLs are in format: https://files.linear.app/workspace/attachment-id/filename
+            Direct access without authentication will return 401 Unauthorized.
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Try as issue first (most common case)
+        issue_uuid = await self._resolve_issue_id(ticket_id)
+
+        if issue_uuid:
+            # Query issue attachments
+            query = """
+                query GetIssueAttachments($issueId: String!) {
+                    issue(id: $issueId) {
+                        id
+                        identifier
+                        attachments {
+                            nodes {
+                                id
+                                title
+                                url
+                                subtitle
+                                metadata
+                                createdAt
+                                updatedAt
+                            }
+                        }
+                    }
+                }
+            """
+
+            try:
+                result = await self.client.execute_query(query, {"issueId": issue_uuid})
+
+                if not result.get("issue"):
+                    logger.warning(f"Issue {ticket_id} not found")
+                    return []
+
+                attachments_data = (
+                    result["issue"].get("attachments", {}).get("nodes", [])
+                )
+
+                # Map to Attachment objects using identifier (not UUID)
+                return [
+                    map_linear_attachment_to_attachment(att, ticket_id)
+                    for att in attachments_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for issue {ticket_id}: {e}")
+                return []
+
+        # Try as project if not an issue
+        project_uuid = await self._resolve_project_id(ticket_id)
+
+        if project_uuid:
+            # Query project attachments (documents)
+            query = """
+                query GetProjectAttachments($projectId: String!) {
+                    project(id: $projectId) {
+                        id
+                        name
+                        documents {
+                            nodes {
+                                id
+                                title
+                                url
+                                createdAt
+                                updatedAt
+                            }
+                        }
+                    }
+                }
+            """
+
+            try:
+                result = await self.client.execute_query(
+                    query, {"projectId": project_uuid}
+                )
+
+                if not result.get("project"):
+                    logger.warning(f"Project {ticket_id} not found")
+                    return []
+
+                documents_data = result["project"].get("documents", {}).get("nodes", [])
+
+                # Map documents to Attachment objects
+                return [
+                    map_linear_attachment_to_attachment(doc, ticket_id)
+                    for doc in documents_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for project {ticket_id}: {e}")
+                return []
+
+        # Not found as either issue or project
+        logger.warning(f"Ticket {ticket_id} not found as issue or project")
+        return []
+
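
Per the authentication note above, attachment URLs returned by get_attachments() are not publicly fetchable. A minimal download sketch using httpx, assuming the adapter's API key is available as api_key:

    import httpx

    async def download_attachment(url: str, api_key: str) -> bytes:
        async with httpx.AsyncClient() as client:
            resp = await client.get(url, headers={"Authorization": f"Bearer {api_key}"})
            resp.raise_for_status()  # 401 here usually means a missing/invalid key
            return resp.content
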
+    async def list_cycles(
+        self, team_id: str | None = None, limit: int = 50
+    ) -> builtins.list[dict[str, Any]]:
+        """List Linear Cycles (Sprints) for the team.
+
+        Args:
+        ----
+            team_id: Linear team UUID. If None, uses the configured team.
+            limit: Maximum number of cycles to return (default: 50)
+
+        Returns:
+        -------
+            List of cycle dictionaries with fields:
+            - id: Cycle UUID
+            - name: Cycle name
+            - number: Cycle number
+            - startsAt: Start date (ISO format)
+            - endsAt: End date (ISO format)
+            - completedAt: Completion date (ISO format, None if not completed)
+            - progress: Progress percentage (0-1)
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Use configured team if not specified
+        if team_id is None:
+            team_id = await self._ensure_team_id()
+
+        try:
+            # Fetch all cycles with pagination
+            all_cycles: list[dict[str, Any]] = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page and len(all_cycles) < limit:
+                # Calculate remaining items needed
+                remaining = limit - len(all_cycles)
+                page_size = min(remaining, 50)  # Linear max page size is typically 50
+
+                variables = {"teamId": team_id, "first": page_size}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(LIST_CYCLES_QUERY, variables)
+
+                cycles_data = result.get("team", {}).get("cycles", {})
+                page_cycles = cycles_data.get("nodes", [])
+                page_info = cycles_data.get("pageInfo", {})
+
+                all_cycles.extend(page_cycles)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
+
+            return all_cycles[:limit]  # Ensure we don't exceed limit
+
+        except Exception as e:
+            raise ValueError(f"Failed to list Linear cycles: {e}") from e
+
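
list_cycles() (and list_epics() further below) share the same cursor-walk pattern; reduced to its core, with fetch_page standing in for the GraphQL call:

    async def collect_pages(fetch_page, limit: int) -> list:
        items, cursor, has_next = [], None, True
        while has_next and len(items) < limit:
            nodes, page_info = await fetch_page(first=min(limit - len(items), 50), after=cursor)
            items.extend(nodes)
            has_next = page_info.get("hasNextPage", False)
            cursor = page_info.get("endCursor")
            if not nodes:
                break
        return items[:limit]
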
+    async def get_issue_status(self, issue_id: str) -> dict[str, Any] | None:
+        """Get rich issue status information for a Linear issue.
+
+        Args:
+        ----
+            issue_id: Linear issue identifier (e.g., 'BTA-123') or UUID
+
+        Returns:
+        -------
+            Dictionary with workflow state details:
+            - id: State UUID
+            - name: State name (e.g., "In Progress")
+            - type: State type (e.g., "started", "completed")
+            - color: State color (hex format)
+            - description: State description
+            - position: Position in workflow
+            Returns None if issue not found.
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Resolve issue identifier to UUID if needed
+        issue_uuid = await self._resolve_issue_id(issue_id)
+        if not issue_uuid:
+            return None
+
+        try:
+            result = await self.client.execute_query(
+                GET_ISSUE_STATUS_QUERY, {"issueId": issue_uuid}
+            )
+
+            issue_data = result.get("issue")
+            if not issue_data:
+                return None
+
+            return issue_data.get("state")
+
+        except Exception as e:
+            raise ValueError(f"Failed to get issue status for '{issue_id}': {e}") from e
+
+    async def list_issue_statuses(
+        self, team_id: str | None = None
+    ) -> builtins.list[dict[str, Any]]:
+        """List all workflow states for the team.
+
+        Args:
+        ----
+            team_id: Linear team UUID. If None, uses the configured team.
+
+        Returns:
+        -------
+            List of workflow state dictionaries with fields:
+            - id: State UUID
+            - name: State name (e.g., "Backlog", "In Progress", "Done")
+            - type: State type (e.g., "backlog", "unstarted", "started", "completed", "canceled")
+            - color: State color (hex format)
+            - description: State description
+            - position: Position in workflow (lower = earlier)
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Use configured team if not specified
+        if team_id is None:
+            team_id = await self._ensure_team_id()
+
+        try:
+            result = await self.client.execute_query(
+                LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
+            )
+
+            states_data = result.get("team", {}).get("states", {})
+            states = states_data.get("nodes", [])
+
+            # Sort by position to maintain workflow order
+            states.sort(key=lambda s: s.get("position", 0))
+
+            return states
+
+        except Exception as e:
+            raise ValueError(f"Failed to list workflow states: {e}") from e
+
+    async def list_epics(
+        self,
+        limit: int = 50,
+        offset: int = 0,
+        state: str | None = None,
+        include_completed: bool = True,
+        **kwargs: Any,
+    ) -> builtins.list[Epic]:
+        """List Linear projects (epics) with efficient pagination.
+
+        Args:
+        ----
+            limit: Maximum number of projects to return (default: 50)
+            offset: Number of projects to skip (note: Linear uses cursor-based pagination)
+            state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
+            include_completed: Whether to include completed projects (default: True)
+            **kwargs: Additional filter parameters (reserved for future use)
+
+        Returns:
+        -------
+            List of Epic objects mapped from Linear projects
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        # Build project filter using existing helper
+        from .types import build_project_filter
+
+        project_filter = build_project_filter(
+            state=state,
+            team_id=team_id,
+            include_completed=include_completed,
+        )
+
+        try:
+            # Fetch projects with pagination
+            all_projects = []
+            has_next_page = True
+            after_cursor = None
+            projects_fetched = 0
+
+            while has_next_page and projects_fetched < limit + offset:
+                # Calculate how many more we need
+                remaining = (limit + offset) - projects_fetched
+                page_size = min(remaining, 50)  # Linear max page size is typically 50
+
+                variables = {"filter": project_filter, "first": page_size}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(LIST_PROJECTS_QUERY, variables)
+
+                projects_data = result.get("projects", {})
+                page_projects = projects_data.get("nodes", [])
+                page_info = projects_data.get("pageInfo", {})
+
+                all_projects.extend(page_projects)
+                projects_fetched += len(page_projects)
+
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
+
+                # Stop if no more results on this page
+                if not page_projects:
+                    break
+
+            # Apply offset and limit
+            paginated_projects = all_projects[offset : offset + limit]
+
+            # Map Linear projects to Epic objects using existing mapper
+            epics = []
+            for project in paginated_projects:
+                epics.append(map_linear_project_to_epic(project))
+
+            return epics
+
+        except Exception as e:
+            raise ValueError(f"Failed to list Linear projects: {e}") from e
+
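
Since Linear paginates by cursor, list_epics() emulates a numeric offset by over-fetching offset + limit items and slicing. The slice step on its own:

    def apply_offset(items: list, offset: int, limit: int) -> list:
        return items[offset : offset + limit]

    assert apply_offset(list(range(10)), offset=3, limit=4) == [3, 4, 5, 6]
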
+    def _linear_update_to_model(self, linear_data: dict[str, Any]) -> ProjectUpdate:
+        """Convert Linear GraphQL response to ProjectUpdate model (1M-238).
+
+        Maps Linear's ProjectUpdate entity fields to the universal ProjectUpdate model,
+        handling health value transformations and optional fields.
+
+        Args:
+        ----
+            linear_data: GraphQL response data for a ProjectUpdate entity
+
+        Returns:
+        -------
+            ProjectUpdate instance with mapped fields
+
+        Linear Health Mapping:
+        ---------------------
+            Linear uses camelCase enum values: onTrack, atRisk, offTrack
+            Universal model uses snake_case: ON_TRACK, AT_RISK, OFF_TRACK
+
+        """
+        # Map Linear health values (camelCase) to universal enum (UPPER_SNAKE_CASE)
+        health_mapping = {
+            "onTrack": ProjectUpdateHealth.ON_TRACK,
+            "atRisk": ProjectUpdateHealth.AT_RISK,
+            "offTrack": ProjectUpdateHealth.OFF_TRACK,
+        }
+
+        health_value = linear_data.get("health")
+        health = health_mapping.get(health_value) if health_value else None
+
+        # Extract user info
+        user_data = linear_data.get("user", {})
+        author_id = user_data.get("id") if user_data else None
+        author_name = user_data.get("name") if user_data else None
+
+        # Extract project info
+        project_data = linear_data.get("project", {})
+        project_id = project_data.get("id", "")
+        project_name = project_data.get("name")
+
+        # Parse timestamps
+        created_at = datetime.fromisoformat(
+            linear_data["createdAt"].replace("Z", "+00:00")
+        )
+        updated_at = None
+        if linear_data.get("updatedAt"):
+            updated_at = datetime.fromisoformat(
+                linear_data["updatedAt"].replace("Z", "+00:00")
+            )
+
+        return ProjectUpdate(
+            id=linear_data["id"],
+            project_id=project_id,
+            project_name=project_name,
+            body=linear_data["body"],
+            health=health,
+            created_at=created_at,
+            updated_at=updated_at,
+            author_id=author_id,
+            author_name=author_name,
+            url=linear_data.get("url"),
+            diff_markdown=linear_data.get("diffMarkdown"),
+        )
+
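
The camelCase-to-enum mapping above has an exact inverse used by create_project_update() below; a round-trip check under that assumption:

    to_linear = {"ON_TRACK": "onTrack", "AT_RISK": "atRisk", "OFF_TRACK": "offTrack"}
    from_linear = {v: k for k, v in to_linear.items()}
    assert all(from_linear[to_linear[k]] == k for k in to_linear)
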
+    async def create_project_update(
+        self,
+        project_id: str,
+        body: str,
+        health: ProjectUpdateHealth | None = None,
+    ) -> ProjectUpdate:
+        """Create a project status update in Linear (1M-238).
+
+        Creates a new status update for a Linear project with optional health indicator.
+        Linear will automatically generate a diff showing changes since the last update.
+
+        Args:
+        ----
+            project_id: Linear project UUID, slugId, or short ID
+            body: Markdown-formatted update content (required)
+            health: Optional health status (ON_TRACK, AT_RISK, OFF_TRACK)
+
+        Returns:
+        -------
+            Created ProjectUpdate with Linear metadata including auto-generated diff
+
+        Raises:
+        ------
+            ValueError: If credentials invalid, project not found, or creation fails
+
+        Example:
+        -------
+            >>> update = await adapter.create_project_update(
+            ...     project_id="PROJ-123",
+            ...     body="Sprint 23 completed. 15/20 stories done.",
+            ...     health=ProjectUpdateHealth.AT_RISK
+            ... )
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(project_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{project_id}' not found")
+
+        # Build mutation variables
+        variables: dict[str, Any] = {
+            "projectId": project_uuid,
+            "body": body,
+        }
+
+        # Map health enum to Linear's camelCase format
+        if health:
+            health_mapping = {
+                ProjectUpdateHealth.ON_TRACK: "onTrack",
+                ProjectUpdateHealth.AT_RISK: "atRisk",
+                ProjectUpdateHealth.OFF_TRACK: "offTrack",
+            }
+            variables["health"] = health_mapping.get(health)
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_PROJECT_UPDATE_MUTATION, variables
+            )
+
+            if not result["projectUpdateCreate"]["success"]:
+                raise ValueError(f"Failed to create project update for '{project_id}'")
+
+            update_data = result["projectUpdateCreate"]["projectUpdate"]
+            logger.info(
+                f"Created project update for project '{project_id}' (UUID: {project_uuid})"
+            )
+
+            return self._linear_update_to_model(update_data)
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to create project update for '{project_id}': {e}"
+            ) from e
+
+    async def list_project_updates(
+        self,
+        project_id: str,
+        limit: int = 10,
+    ) -> list[ProjectUpdate]:
+        """List project updates for a project (1M-238).
+
+        Retrieves recent status updates for a Linear project, ordered by creation date.
+
+        Args:
+        ----
+            project_id: Linear project UUID, slugId, or short ID
+            limit: Maximum number of updates to return (default: 10, max: 250)
+
+        Returns:
+        -------
+            List of ProjectUpdate objects ordered by creation date (newest first)
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or query fails
+
+        Example:
+        -------
+            >>> updates = await adapter.list_project_updates("PROJ-123", limit=5)
+            >>> for update in updates:
+            ...     print(f"{update.created_at}: {update.health} - {update.body[:50]}")
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(project_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{project_id}' not found")
+
+        try:
+            result = await self.client.execute_query(
+                LIST_PROJECT_UPDATES_QUERY,
+                {"projectId": project_uuid, "first": min(limit, 250)},
+            )
+
+            project_data = result.get("project")
+            if not project_data:
+                raise ValueError(f"Project '{project_id}' not found")
+
+            updates_data = project_data.get("projectUpdates", {}).get("nodes", [])
+
+            # Map Linear updates to ProjectUpdate models
+            return [self._linear_update_to_model(update) for update in updates_data]
+
+        except Exception as e:
+            logger.warning(f"Failed to list project updates for {project_id}: {e}")
+            raise ValueError(
+                f"Failed to list project updates for '{project_id}': {e}"
+            ) from e
+
+    async def get_project_update(
+        self,
+        update_id: str,
+    ) -> ProjectUpdate:
+        """Get a specific project update by ID (1M-238).
+
+        Retrieves detailed information about a single project status update.
+
+        Args:
+        ----
+            update_id: Linear ProjectUpdate UUID
+
+        Returns:
+        -------
+            ProjectUpdate object with full details
+
+        Raises:
+        ------
+            ValueError: If credentials invalid, update not found, or query fails
+
+        Example:
+        -------
+            >>> update = await adapter.get_project_update("update-uuid-here")
+            >>> print(f"Update: {update.body}")
+            >>> print(f"Health: {update.health}")
+            >>> print(f"Diff: {update.diff_markdown}")
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        try:
+            result = await self.client.execute_query(
+                GET_PROJECT_UPDATE_QUERY, {"id": update_id}
+            )
+
+            update_data = result.get("projectUpdate")
+            if not update_data:
+                raise ValueError(f"Project update '{update_id}' not found")
+
+            return self._linear_update_to_model(update_data)
+
+        except Exception as e:
+            logger.error(f"Failed to get project update {update_id}: {e}")
+            raise ValueError(f"Failed to get project update '{update_id}': {e}") from e
+
     async def close(self) -> None:
         """Close the adapter and clean up resources."""
         await self.client.close()