mcp-ticketer 0.1.30__py3-none-any.whl → 1.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-ticketer has been flagged as potentially problematic; consult the registry's advisory page for details.

Files changed (109)
  1. mcp_ticketer/__init__.py +10 -10
  2. mcp_ticketer/__version__.py +3 -3
  3. mcp_ticketer/adapters/__init__.py +2 -0
  4. mcp_ticketer/adapters/aitrackdown.py +796 -46
  5. mcp_ticketer/adapters/asana/__init__.py +15 -0
  6. mcp_ticketer/adapters/asana/adapter.py +1416 -0
  7. mcp_ticketer/adapters/asana/client.py +292 -0
  8. mcp_ticketer/adapters/asana/mappers.py +348 -0
  9. mcp_ticketer/adapters/asana/types.py +146 -0
  10. mcp_ticketer/adapters/github.py +879 -129
  11. mcp_ticketer/adapters/hybrid.py +11 -11
  12. mcp_ticketer/adapters/jira.py +973 -73
  13. mcp_ticketer/adapters/linear/__init__.py +24 -0
  14. mcp_ticketer/adapters/linear/adapter.py +2732 -0
  15. mcp_ticketer/adapters/linear/client.py +344 -0
  16. mcp_ticketer/adapters/linear/mappers.py +420 -0
  17. mcp_ticketer/adapters/linear/queries.py +479 -0
  18. mcp_ticketer/adapters/linear/types.py +360 -0
  19. mcp_ticketer/adapters/linear.py +10 -2315
  20. mcp_ticketer/analysis/__init__.py +23 -0
  21. mcp_ticketer/analysis/orphaned.py +218 -0
  22. mcp_ticketer/analysis/similarity.py +224 -0
  23. mcp_ticketer/analysis/staleness.py +266 -0
  24. mcp_ticketer/cache/memory.py +9 -8
  25. mcp_ticketer/cli/adapter_diagnostics.py +421 -0
  26. mcp_ticketer/cli/auggie_configure.py +116 -15
  27. mcp_ticketer/cli/codex_configure.py +274 -82
  28. mcp_ticketer/cli/configure.py +888 -151
  29. mcp_ticketer/cli/diagnostics.py +400 -157
  30. mcp_ticketer/cli/discover.py +297 -26
  31. mcp_ticketer/cli/gemini_configure.py +119 -26
  32. mcp_ticketer/cli/init_command.py +880 -0
  33. mcp_ticketer/cli/instruction_commands.py +435 -0
  34. mcp_ticketer/cli/linear_commands.py +616 -0
  35. mcp_ticketer/cli/main.py +203 -1165
  36. mcp_ticketer/cli/mcp_configure.py +474 -90
  37. mcp_ticketer/cli/mcp_server_commands.py +415 -0
  38. mcp_ticketer/cli/migrate_config.py +12 -8
  39. mcp_ticketer/cli/platform_commands.py +123 -0
  40. mcp_ticketer/cli/platform_detection.py +418 -0
  41. mcp_ticketer/cli/platform_installer.py +513 -0
  42. mcp_ticketer/cli/python_detection.py +126 -0
  43. mcp_ticketer/cli/queue_commands.py +15 -15
  44. mcp_ticketer/cli/setup_command.py +639 -0
  45. mcp_ticketer/cli/simple_health.py +90 -65
  46. mcp_ticketer/cli/ticket_commands.py +1013 -0
  47. mcp_ticketer/cli/update_checker.py +313 -0
  48. mcp_ticketer/cli/utils.py +114 -66
  49. mcp_ticketer/core/__init__.py +24 -1
  50. mcp_ticketer/core/adapter.py +250 -16
  51. mcp_ticketer/core/config.py +145 -37
  52. mcp_ticketer/core/env_discovery.py +101 -22
  53. mcp_ticketer/core/env_loader.py +349 -0
  54. mcp_ticketer/core/exceptions.py +160 -0
  55. mcp_ticketer/core/http_client.py +26 -26
  56. mcp_ticketer/core/instructions.py +405 -0
  57. mcp_ticketer/core/label_manager.py +732 -0
  58. mcp_ticketer/core/mappers.py +42 -30
  59. mcp_ticketer/core/models.py +280 -28
  60. mcp_ticketer/core/onepassword_secrets.py +379 -0
  61. mcp_ticketer/core/project_config.py +183 -49
  62. mcp_ticketer/core/registry.py +3 -3
  63. mcp_ticketer/core/session_state.py +171 -0
  64. mcp_ticketer/core/state_matcher.py +592 -0
  65. mcp_ticketer/core/url_parser.py +425 -0
  66. mcp_ticketer/core/validators.py +69 -0
  67. mcp_ticketer/defaults/ticket_instructions.md +644 -0
  68. mcp_ticketer/mcp/__init__.py +29 -1
  69. mcp_ticketer/mcp/__main__.py +60 -0
  70. mcp_ticketer/mcp/server/__init__.py +25 -0
  71. mcp_ticketer/mcp/server/__main__.py +60 -0
  72. mcp_ticketer/mcp/server/constants.py +58 -0
  73. mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
  74. mcp_ticketer/mcp/server/dto.py +195 -0
  75. mcp_ticketer/mcp/server/main.py +1343 -0
  76. mcp_ticketer/mcp/server/response_builder.py +206 -0
  77. mcp_ticketer/mcp/server/routing.py +655 -0
  78. mcp_ticketer/mcp/server/server_sdk.py +151 -0
  79. mcp_ticketer/mcp/server/tools/__init__.py +56 -0
  80. mcp_ticketer/mcp/server/tools/analysis_tools.py +495 -0
  81. mcp_ticketer/mcp/server/tools/attachment_tools.py +226 -0
  82. mcp_ticketer/mcp/server/tools/bulk_tools.py +273 -0
  83. mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
  84. mcp_ticketer/mcp/server/tools/config_tools.py +1439 -0
  85. mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
  86. mcp_ticketer/mcp/server/tools/hierarchy_tools.py +921 -0
  87. mcp_ticketer/mcp/server/tools/instruction_tools.py +300 -0
  88. mcp_ticketer/mcp/server/tools/label_tools.py +948 -0
  89. mcp_ticketer/mcp/server/tools/pr_tools.py +152 -0
  90. mcp_ticketer/mcp/server/tools/search_tools.py +215 -0
  91. mcp_ticketer/mcp/server/tools/session_tools.py +170 -0
  92. mcp_ticketer/mcp/server/tools/ticket_tools.py +1268 -0
  93. mcp_ticketer/mcp/server/tools/user_ticket_tools.py +547 -0
  94. mcp_ticketer/queue/__init__.py +1 -0
  95. mcp_ticketer/queue/health_monitor.py +168 -136
  96. mcp_ticketer/queue/manager.py +95 -25
  97. mcp_ticketer/queue/queue.py +40 -21
  98. mcp_ticketer/queue/run_worker.py +6 -1
  99. mcp_ticketer/queue/ticket_registry.py +213 -155
  100. mcp_ticketer/queue/worker.py +109 -49
  101. mcp_ticketer-1.2.11.dist-info/METADATA +792 -0
  102. mcp_ticketer-1.2.11.dist-info/RECORD +110 -0
  103. mcp_ticketer/mcp/server.py +0 -1895
  104. mcp_ticketer-0.1.30.dist-info/METADATA +0 -413
  105. mcp_ticketer-0.1.30.dist-info/RECORD +0 -49
  106. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/WHEEL +0 -0
  107. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/entry_points.txt +0 -0
  108. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/licenses/LICENSE +0 -0
  109. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,2732 @@
1
+ """Main LinearAdapter class for Linear API integration."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ import logging
7
+ import mimetypes
8
+ import os
9
+ from pathlib import Path
10
+ from typing import Any
11
+
12
+ try:
13
+ import httpx
14
+ from gql import gql
15
+ from gql.transport.exceptions import TransportQueryError
16
+ except ImportError:
17
+ gql = None
18
+ TransportQueryError = Exception
19
+ httpx = None
20
+
21
+ import builtins
22
+
23
+ from ...core.adapter import BaseAdapter
24
+ from ...core.models import Attachment, Comment, Epic, SearchQuery, Task, TicketState
25
+ from ...core.registry import AdapterRegistry
26
+ from ...core.url_parser import URLParserError, normalize_project_id
27
+ from .client import LinearGraphQLClient
28
+ from .mappers import (
29
+ build_linear_issue_input,
30
+ build_linear_issue_update_input,
31
+ map_linear_attachment_to_attachment,
32
+ map_linear_comment_to_comment,
33
+ map_linear_issue_to_task,
34
+ map_linear_project_to_epic,
35
+ )
36
+ from .queries import (
37
+ ALL_FRAGMENTS,
38
+ CREATE_ISSUE_MUTATION,
39
+ CREATE_LABEL_MUTATION,
40
+ GET_CUSTOM_VIEW_QUERY,
41
+ GET_ISSUE_STATUS_QUERY,
42
+ LIST_CYCLES_QUERY,
43
+ LIST_ISSUE_STATUSES_QUERY,
44
+ LIST_ISSUES_QUERY,
45
+ LIST_PROJECTS_QUERY,
46
+ SEARCH_ISSUES_QUERY,
47
+ UPDATE_ISSUE_MUTATION,
48
+ WORKFLOW_STATES_QUERY,
49
+ )
50
+ from .types import (
51
+ LinearStateMapping,
52
+ build_issue_filter,
53
+ get_linear_priority,
54
+ get_linear_state_type,
55
+ )
56
+
57
+
58
+ class LinearAdapter(BaseAdapter[Task]):
59
+ """Adapter for Linear issue tracking system using native GraphQL API.
60
+
61
+ This adapter provides comprehensive integration with Linear's GraphQL API,
62
+ supporting all major ticket management operations including:
63
+
64
+ - CRUD operations for issues and projects
65
+ - State transitions and workflow management
66
+ - User assignment and search functionality
67
+ - Comment management
68
+ - Epic/Issue/Task hierarchy support
69
+
70
+ The adapter is organized into multiple modules for better maintainability:
71
+ - client.py: GraphQL client management
72
+ - queries.py: GraphQL queries and fragments
73
+ - types.py: Linear-specific types and mappings
74
+ - mappers.py: Data transformation logic
75
+ """
76
+
77
def __init__(self, config: dict[str, Any]):
    """Initialize Linear adapter.

    Args:
    ----
        config: Configuration with:
            - api_key: Linear API key (or LINEAR_API_KEY env var)
            - workspace: Linear workspace name (optional, for documentation)
            - team_key: Linear team key (e.g., 'BTA') OR
            - team_id: Linear team UUID (e.g., '02d15669-7351-4451-9719-807576c16049')
            - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)

    Raises:
    ------
        ValueError: If required configuration is missing

    """
    # Initialize instance variables before calling super().__init__
    # because parent constructor calls _get_state_mapping()
    self._team_data: dict[str, Any] | None = None
    self._workflow_states: dict[str, dict[str, Any]] | None = None
    self._labels_cache: list[dict[str, Any]] | None = None
    self._users_cache: dict[str, dict[str, Any]] | None = None
    self._initialized = False

    super().__init__(config)

    # Extract configuration
    self.api_key = config.get("api_key") or os.getenv("LINEAR_API_KEY")
    if not self.api_key:
        raise ValueError(
            "Linear API key is required (api_key or LINEAR_API_KEY env var)"
        )

    # Clean API key - remove common prefixes if accidentally included in config
    # (The client will add Bearer back when making requests)
    if isinstance(self.api_key, str):
        # BUGFIX: strip only a single leading "Bearer " prefix. The previous
        # str.replace(...) removed EVERY occurrence of the substring, which
        # would corrupt any key that legitimately contained it elsewhere.
        if self.api_key.startswith("Bearer "):
            self.api_key = self.api_key[len("Bearer "):]
        # Remove environment variable name prefix (e.g., "LINEAR_API_KEY=")
        if "=" in self.api_key:
            parts = self.api_key.split("=", 1)
            if len(parts) == 2 and parts[0].upper() in (
                "LINEAR_API_KEY",
                "API_KEY",
            ):
                self.api_key = parts[1]

    # Validate API key format (Linear keys start with "lin_api_")
    if not self.api_key.startswith("lin_api_"):
        raise ValueError(
            f"Invalid Linear API key format. Expected key starting with 'lin_api_', "
            f"got: {self.api_key[:15]}... "
            f"Please check your configuration and ensure the API key is correct."
        )

    self.workspace = config.get("workspace", "")
    self.team_key = config.get("team_key")
    self.team_id = config.get("team_id")
    self.user_email = config.get("user_email")  # Optional default assignee
    self.api_url = config.get("api_url", "https://api.linear.app/graphql")

    # Validate team configuration
    if not self.team_key and not self.team_id:
        raise ValueError("Either team_key or team_id must be provided")

    # Initialize client with clean API key
    self.client = LinearGraphQLClient(self.api_key)
146
+
147
def validate_credentials(self) -> tuple[bool, str]:
    """Check that the adapter holds the minimum configuration to reach Linear.

    Returns:
    -------
    Tuple of (is_valid, error_message); error_message is empty when valid.

    """
    # An API key is mandatory; without it no request can be authenticated.
    if not self.api_key:
        return False, "Linear API key is required"

    # At least one way of identifying the team must be configured.
    if not (self.team_key or self.team_id):
        return False, "Either team_key or team_id must be provided"

    return True, ""
162
+
163
async def initialize(self) -> None:
    """Initialize adapter by preloading team, states, and labels data concurrently.

    Idempotent: subsequent calls after a successful initialization are no-ops.

    Raises:
    ------
    ValueError: If the connection test fails or any preload step errors.

    """
    if self._initialized:
        return

    try:
        # Test connection first
        if not await self.client.test_connection():
            raise ValueError("Failed to connect to Linear API - check credentials")

        # Resolve team ID (from team_key if needed)
        team_id = await self._ensure_team_id()

        # Both loads depend only on team_id, so run them concurrently.
        # (The previous code awaited them sequentially despite the
        # docstring promising concurrency.)
        await asyncio.gather(
            self._load_workflow_states(team_id),
            self._load_team_labels(team_id),
        )

        self._initialized = True

    except Exception as e:
        raise ValueError(f"Failed to initialize Linear adapter: {e}") from e
184
+
185
+ async def _ensure_team_id(self) -> str:
186
+ """Ensure we have a team ID, resolving from team_key if needed.
187
+
188
+ Validates that team_id is a UUID. If it looks like a team_key,
189
+ resolves it to the actual UUID.
190
+
191
+ Returns:
192
+ -------
193
+ Valid Linear team UUID
194
+
195
+ Raises:
196
+ ------
197
+ ValueError: If neither team_id nor team_key provided, or resolution fails
198
+
199
+ """
200
+ logger = logging.getLogger(__name__)
201
+
202
+ # If we have a team_id, validate it's actually a UUID
203
+ if self.team_id:
204
+ # Check if it looks like a UUID (36 chars with hyphens)
205
+ import re
206
+
207
+ uuid_pattern = re.compile(
208
+ r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
209
+ re.IGNORECASE,
210
+ )
211
+
212
+ if uuid_pattern.match(self.team_id):
213
+ # Already a valid UUID
214
+ return str(self.team_id)
215
+ # Looks like a team_key string - need to resolve it
216
+ logger.warning(
217
+ f"team_id '{self.team_id}' is not a UUID - treating as team_key and resolving"
218
+ )
219
+ teams = await self._get_team_by_key(self.team_id)
220
+ if teams and len(teams) > 0:
221
+ resolved_id = teams[0]["id"]
222
+ logger.info(
223
+ f"Resolved team_key '{self.team_id}' to UUID: {resolved_id}"
224
+ )
225
+ # Cache the resolved UUID
226
+ self.team_id = resolved_id
227
+ return resolved_id
228
+ raise ValueError(
229
+ f"Cannot resolve team_id '{self.team_id}' to a valid Linear team UUID. "
230
+ f"Please use team_key instead for team short codes like 'ENG'."
231
+ )
232
+
233
+ # No team_id, must have team_key
234
+ if not self.team_key:
235
+ raise ValueError(
236
+ "Either team_id (UUID) or team_key (short code) must be provided"
237
+ )
238
+
239
+ # Query team by key
240
+ teams = await self._get_team_by_key(self.team_key)
241
+
242
+ if not teams or len(teams) == 0:
243
+ raise ValueError(f"Team with key '{self.team_key}' not found")
244
+
245
+ team = teams[0]
246
+ team_id = team["id"]
247
+
248
+ # Cache the resolved team_id
249
+ self.team_id = team_id
250
+ self._team_data = team
251
+ logger.info(f"Resolved team_key '{self.team_key}' to team_id: {team_id}")
252
+
253
+ return team_id
254
+
255
+ async def _get_team_by_key(self, team_key: str) -> list[dict[str, Any]]:
256
+ """Query Linear API to get team by key.
257
+
258
+ Args:
259
+ ----
260
+ team_key: Short team identifier (e.g., 'ENG', 'BTA')
261
+
262
+ Returns:
263
+ -------
264
+ List of matching teams
265
+
266
+ """
267
+ query = """
268
+ query GetTeamByKey($key: String!) {
269
+ teams(filter: { key: { eq: $key } }) {
270
+ nodes {
271
+ id
272
+ key
273
+ name
274
+ }
275
+ }
276
+ }
277
+ """
278
+
279
+ result = await self.client.execute_query(query, {"key": team_key})
280
+
281
+ if "teams" in result and "nodes" in result["teams"]:
282
+ return result["teams"]["nodes"]
283
+
284
+ return []
285
+
286
+ async def _get_custom_view(self, view_id: str) -> dict[str, Any] | None:
287
+ """Get a Linear custom view by ID to check if it exists.
288
+
289
+ Args:
290
+ ----
291
+ view_id: View identifier (slug-uuid format)
292
+
293
+ Returns:
294
+ -------
295
+ View dict with fields (id, name, description, issues) or None if not found
296
+
297
+ """
298
+ logging.debug(f"[VIEW DEBUG] _get_custom_view called with view_id: {view_id}")
299
+
300
+ if not view_id:
301
+ logging.debug("[VIEW DEBUG] view_id is empty, returning None")
302
+ return None
303
+
304
+ try:
305
+ logging.debug(
306
+ f"[VIEW DEBUG] Executing GET_CUSTOM_VIEW_QUERY for view_id: {view_id}"
307
+ )
308
+ result = await self.client.execute_query(
309
+ GET_CUSTOM_VIEW_QUERY, {"viewId": view_id, "first": 10}
310
+ )
311
+ logging.debug(f"[VIEW DEBUG] Query result: {result}")
312
+
313
+ if result.get("customView"):
314
+ logging.debug(
315
+ f"[VIEW DEBUG] customView found in result: {result.get('customView')}"
316
+ )
317
+ return result["customView"]
318
+
319
+ logging.debug(
320
+ f"[VIEW DEBUG] No customView in result. Checking pattern: has_hyphen={'-' in view_id}, length={len(view_id)}"
321
+ )
322
+
323
+ # API query failed but check if this looks like a view identifier
324
+ # View IDs from URLs have format: slug-uuid (e.g., "mcp-skills-issues-0d0359fabcf9")
325
+ # If it has hyphens and is longer than 12 chars, it's likely a view URL identifier
326
+ if "-" in view_id and len(view_id) > 12:
327
+ logging.debug(
328
+ "[VIEW DEBUG] Pattern matched! Returning minimal view object"
329
+ )
330
+ # Return minimal view object to trigger helpful error message
331
+ # We can't fetch the actual name, so use generic "Linear View"
332
+ return {
333
+ "id": view_id,
334
+ "name": "Linear View",
335
+ "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
336
+ }
337
+
338
+ logging.debug("[VIEW DEBUG] Pattern did not match, returning None")
339
+ return None
340
+
341
+ except Exception as e:
342
+ logging.debug(
343
+ f"[VIEW DEBUG] Exception caught: {type(e).__name__}: {str(e)}"
344
+ )
345
+ # Linear returns error if view not found
346
+ # Check if this looks like a view identifier to provide helpful error
347
+ if "-" in view_id and len(view_id) > 12:
348
+ logging.debug(
349
+ "[VIEW DEBUG] Exception handler: Pattern matched! Returning minimal view object"
350
+ )
351
+ # Return minimal view object to trigger helpful error message
352
+ return {
353
+ "id": view_id,
354
+ "name": "Linear View",
355
+ "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
356
+ }
357
+ logging.debug(
358
+ "[VIEW DEBUG] Exception handler: Pattern did not match, returning None"
359
+ )
360
+ return None
361
+
362
async def get_project(self, project_id: str) -> dict[str, Any] | None:
    """Fetch a Linear project via the direct project(id:) GraphQL query.

    Accepts a UUID, slugId, or short ID, and returns the raw project
    payload. Lookup failures are swallowed and reported as None.

    Args:
    ----
    project_id: Project UUID, slugId, or short ID

    Returns:
    -------
    Project dict with fields (id, name, description, state, etc.) or None if not found

    Examples:
    --------
    - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (UUID)
    - "crm-smart-monitoring-system-f59a41a96c52" (slugId)
    - "6cf55cfcfad4" (short ID - 12 hex chars)

    """
    if not project_id:
        return None

    query = """
    query GetProject($id: String!) {
        project(id: $id) {
            id
            name
            description
            state
            slugId
            createdAt
            updatedAt
            url
            icon
            color
            targetDate
            startedAt
            completedAt
            teams {
                nodes {
                    id
                    name
                    key
                    description
                }
            }
        }
    }
    """

    try:
        result = await self.client.execute_query(query, {"id": project_id})
        project = result.get("project")
        # Falsy payload means no project with that identifier.
        return project if project else None
    except Exception:
        # Linear raises when the project does not exist; treat as "not found".
        return None
427
+
428
async def get_epic(self, epic_id: str, include_issues: bool = True) -> Epic | None:
    """Fetch a Linear project as an Epic, optionally with its issues.

    Preferred read path for projects/epics: callers decide whether the
    (slower) child-issue fetch happens.

    Args:
    ----
    epic_id: Project UUID, slugId, or short ID
    include_issues: Whether to fetch and populate child_issues (default True)

    Returns:
    -------
    Epic object with child_issues populated if include_issues=True,
    or None if project not found

    Raises:
    ------
    ValueError: If credentials invalid

    """
    # Refuse to query with an incomplete configuration.
    is_valid, error_message = self.validate_credentials()
    if not is_valid:
        raise ValueError(error_message)

    project_data = await self.get_project(epic_id)
    if project_data is None:
        return None

    epic = map_linear_project_to_epic(project_data)

    if include_issues:
        # Populate child issue IDs, skipping any issue without an ID.
        issues = await self._get_project_issues(epic_id)
        epic.child_issues = [t.id for t in issues if t.id is not None]

    return epic
476
+
477
async def _resolve_project_id(self, project_identifier: str) -> str | None:
    """Resolve project identifier (slug, name, short ID, or URL) to full UUID.

    Args:
    ----
    project_identifier: Project slug, name, short ID, or URL

    Returns:
    -------
    Full Linear project UUID, or None if not found

    Raises:
    ------
    ValueError: If project lookup fails

    Examples:
    --------
    - "crm-smart-monitoring-system" (slug)
    - "CRM Smart Monitoring System" (name)
    - "f59a41a96c52" (short ID from URL)
    - "https://linear.app/travel-bta/project/crm-smart-monitoring-system-f59a41a96c52/overview" (full URL)

    """
    if not project_identifier:
        return None

    logger = logging.getLogger(__name__)

    def _is_short_id(s: str) -> bool:
        # Linear short IDs are always exactly 12 hexadecimal characters.
        return len(s) == 12 and all(c in "0123456789abcdefABCDEF" for c in s)

    # Use tested URL parser to normalize the identifier. It extracts
    # project IDs from full URLs, slug-id forms, and plain identifiers.
    try:
        project_identifier = normalize_project_id(
            project_identifier, adapter_type="linear"
        )
    except URLParserError as e:
        logger.warning(f"Failed to parse project identifier: {e}")
        # Continue with original identifier - may still work if it's a name

    # Already a full UUID (exactly 36 chars with exactly 4 dashes)? Done.
    if len(project_identifier) == 36 and project_identifier.count("-") == 4:
        return project_identifier

    # OPTIMIZATION: direct query first when the identifier looks like a
    # short ID or a slugId ending in one - cheaper than listing everything.
    should_try_direct_query = _is_short_id(project_identifier)
    if "-" in project_identifier:
        tail = project_identifier.rsplit("-", 1)[-1]
        if _is_short_id(tail):
            should_try_direct_query = True

    if should_try_direct_query:
        try:
            project = await self.get_project(project_identifier)
            if project:
                return project["id"]
        except Exception as e:
            # Fall through to the list-based search below.
            logger.debug(
                f"Direct project query failed for '{project_identifier}': {e}. "
                f"Falling back to listing all projects."
            )

    # FALLBACK: page through all projects; handles name lookups and edge cases.
    query = """
    query GetProjects($first: Int!, $after: String) {
        projects(first: $first, after: $after) {
            nodes {
                id
                name
                slugId
            }
            pageInfo {
                hasNextPage
                endCursor
            }
        }
    }
    """

    try:
        all_projects: list[dict[str, Any]] = []
        after_cursor = None
        has_next_page = True

        while has_next_page:
            variables: dict[str, Any] = {"first": 100}
            if after_cursor:
                variables["after"] = after_cursor

            result = await self.client.execute_query(query, variables)
            projects_data = result.get("projects", {})
            all_projects.extend(projects_data.get("nodes", []))
            page_info = projects_data.get("pageInfo", {})
            has_next_page = page_info.get("hasNextPage", False)
            after_cursor = page_info.get("endCursor")

        needle = project_identifier.lower()
        for project in all_projects:
            slug_id = project.get("slugId", "")
            if slug_id:
                # slugId format: "crm-smart-monitoring-system-f59a41a96c52".
                # Split off the trailing 12-hex short ID when present.
                slug_part, short_id = slug_id, ""
                if "-" in slug_id:
                    head, _, tail = slug_id.rpartition("-")
                    if _is_short_id(tail):
                        slug_part, short_id = head, tail

                # BUGFIX: compare the full slugId even when it contains no
                # hyphen (the old code only compared inside the hyphen branch).
                if (
                    slug_id.lower() == needle
                    or slug_part.lower() == needle
                    or (short_id and short_id.lower() == needle)
                ):
                    return project["id"]

            # Exact name match (case-insensitive).
            if project["name"].lower() == needle:
                return project["id"]

        # No match found
        return None

    except Exception as e:
        raise ValueError(
            f"Failed to resolve project '{project_identifier}': {e}"
        ) from e
637
+
638
+ async def _validate_project_team_association(
639
+ self, project_id: str, team_id: str
640
+ ) -> tuple[bool, list[str]]:
641
+ """Check if team is associated with project.
642
+
643
+ Args:
644
+ ----
645
+ project_id: Linear project UUID
646
+ team_id: Linear team UUID
647
+
648
+ Returns:
649
+ -------
650
+ Tuple of (is_associated, list_of_project_team_ids)
651
+
652
+ """
653
+ project = await self.get_project(project_id)
654
+ if not project:
655
+ return False, []
656
+
657
+ # Extract team IDs from project's teams
658
+ project_team_ids = [
659
+ team["id"] for team in project.get("teams", {}).get("nodes", [])
660
+ ]
661
+
662
+ return team_id in project_team_ids, project_team_ids
663
+
664
+ async def _ensure_team_in_project(self, project_id: str, team_id: str) -> bool:
665
+ """Add team to project if not already associated.
666
+
667
+ Args:
668
+ ----
669
+ project_id: Linear project UUID
670
+ team_id: Linear team UUID to add
671
+
672
+ Returns:
673
+ -------
674
+ True if successful, False otherwise
675
+
676
+ """
677
+ # First check current association
678
+ is_associated, existing_team_ids = (
679
+ await self._validate_project_team_association(project_id, team_id)
680
+ )
681
+
682
+ if is_associated:
683
+ return True # Already associated, nothing to do
684
+
685
+ # Add team to project by updating project's teamIds
686
+ update_query = """
687
+ mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
688
+ projectUpdate(id: $id, input: $input) {
689
+ success
690
+ project {
691
+ id
692
+ teams {
693
+ nodes {
694
+ id
695
+ name
696
+ }
697
+ }
698
+ }
699
+ }
700
+ }
701
+ """
702
+
703
+ # Include existing teams + new team
704
+ all_team_ids = existing_team_ids + [team_id]
705
+
706
+ try:
707
+ result = await self.client.execute_mutation(
708
+ update_query, {"id": project_id, "input": {"teamIds": all_team_ids}}
709
+ )
710
+ success = result.get("projectUpdate", {}).get("success", False)
711
+
712
+ if success:
713
+ logging.getLogger(__name__).info(
714
+ f"Successfully added team {team_id} to project {project_id}"
715
+ )
716
+ else:
717
+ logging.getLogger(__name__).warning(
718
+ f"Failed to add team {team_id} to project {project_id}"
719
+ )
720
+
721
+ return success
722
+ except Exception as e:
723
+ logging.getLogger(__name__).error(
724
+ f"Error adding team {team_id} to project {project_id}: {e}"
725
+ )
726
+ return False
727
+
728
async def _get_project_issues(
    self, project_id: str, limit: int = 100
) -> list[Task]:
    """Fetch the issues belonging to a Linear project as Task objects.

    Reuses the shared build_issue_filter() and LIST_ISSUES_QUERY
    infrastructure, filtering by project_id. Best-effort: failures are
    logged and an empty list is returned.

    Args:
    ----
    project_id: Project UUID, slugId, or short ID
    limit: Maximum issues to return (default 100, max 250)

    Returns:
    -------
    List of Task objects representing project's issues

    Raises:
    ------
    ValueError: If credentials invalid or query fails

    """
    logger = logging.getLogger(__name__)

    variables = {
        "filter": build_issue_filter(project_id=project_id),
        # Linear's API caps page size at 250.
        "first": min(limit, 250),
    }

    try:
        result = await self.client.execute_query(LIST_ISSUES_QUERY, variables)
    except Exception as e:
        # Log but don't fail - return empty list if issues can't be fetched.
        logger.warning(f"Failed to fetch project issues for {project_id}: {e}")
        return []

    nodes = result.get("issues", {}).get("nodes", [])
    return [map_linear_issue_to_task(node) for node in nodes]
771
+
772
+ async def _resolve_issue_id(self, issue_identifier: str) -> str | None:
773
+ """Resolve issue identifier (like "ENG-842") to full UUID.
774
+
775
+ Args:
776
+ ----
777
+ issue_identifier: Issue identifier (e.g., "ENG-842") or UUID
778
+
779
+ Returns:
780
+ -------
781
+ Full Linear issue UUID, or None if not found
782
+
783
+ Raises:
784
+ ------
785
+ ValueError: If issue lookup fails
786
+
787
+ Examples:
788
+ --------
789
+ - "ENG-842" (issue identifier)
790
+ - "BTA-123" (issue identifier)
791
+ - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (already a UUID)
792
+
793
+ """
794
+ if not issue_identifier:
795
+ return None
796
+
797
+ # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
798
+ # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
799
+ if len(issue_identifier) == 36 and issue_identifier.count("-") == 4:
800
+ return issue_identifier
801
+
802
+ # Query issue by identifier to get its UUID
803
+ query = """
804
+ query GetIssueId($identifier: String!) {
805
+ issue(id: $identifier) {
806
+ id
807
+ }
808
+ }
809
+ """
810
+
811
+ try:
812
+ result = await self.client.execute_query(
813
+ query, {"identifier": issue_identifier}
814
+ )
815
+
816
+ if result.get("issue"):
817
+ return result["issue"]["id"]
818
+
819
+ # No match found
820
+ return None
821
+
822
+ except Exception as e:
823
+ raise ValueError(
824
+ f"Failed to resolve issue '{issue_identifier}': {e}"
825
+ ) from e
826
+
827
+ async def _load_workflow_states(self, team_id: str) -> None:
828
+ """Load and cache workflow states for the team.
829
+
830
+ Args:
831
+ ----
832
+ team_id: Linear team ID
833
+
834
+ """
835
+ try:
836
+ result = await self.client.execute_query(
837
+ WORKFLOW_STATES_QUERY, {"teamId": team_id}
838
+ )
839
+
840
+ workflow_states = {}
841
+ for state in result["team"]["states"]["nodes"]:
842
+ state_type = state["type"].lower()
843
+ if state_type not in workflow_states:
844
+ workflow_states[state_type] = state
845
+ elif state["position"] < workflow_states[state_type]["position"]:
846
+ workflow_states[state_type] = state
847
+
848
+ self._workflow_states = workflow_states
849
+
850
+ except Exception as e:
851
+ raise ValueError(f"Failed to load workflow states: {e}") from e
852
+
853
+ async def _load_team_labels(self, team_id: str) -> None:
854
+ """Load and cache labels for the team with retry logic.
855
+
856
+ Args:
857
+ ----
858
+ team_id: Linear team ID
859
+
860
+ """
861
+ logger = logging.getLogger(__name__)
862
+
863
+ query = """
864
+ query GetTeamLabels($teamId: String!) {
865
+ team(id: $teamId) {
866
+ labels {
867
+ nodes {
868
+ id
869
+ name
870
+ color
871
+ description
872
+ }
873
+ }
874
+ }
875
+ }
876
+ """
877
+
878
+ max_retries = 3
879
+ for attempt in range(max_retries):
880
+ try:
881
+ result = await self.client.execute_query(query, {"teamId": team_id})
882
+ labels = result.get("team", {}).get("labels", {}).get("nodes", [])
883
+ self._labels_cache = labels
884
+ logger.info(f"Loaded {len(labels)} labels for team {team_id}")
885
+ return # Success
886
+
887
+ except Exception as e:
888
+ if attempt < max_retries - 1:
889
+ wait_time = 2**attempt
890
+ logger.warning(
891
+ f"Failed to load labels (attempt {attempt + 1}/{max_retries}): {e}. "
892
+ f"Retrying in {wait_time}s..."
893
+ )
894
+ await asyncio.sleep(wait_time)
895
+ else:
896
+ logger.error(
897
+ f"Failed to load team labels after {max_retries} attempts: {e}",
898
+ exc_info=True,
899
+ )
900
+ self._labels_cache = [] # Explicitly empty on failure
901
+
902
+ async def _create_label(
903
+ self, name: str, team_id: str, color: str = "#0366d6"
904
+ ) -> str:
905
+ """Create a new label in Linear.
906
+
907
+ Args:
908
+ ----
909
+ name: Label name
910
+ team_id: Linear team ID
911
+ color: Label color (hex format, default: blue)
912
+
913
+ Returns:
914
+ -------
915
+ Created label ID
916
+
917
+ Raises:
918
+ ------
919
+ ValueError: If label creation fails
920
+
921
+ """
922
+ logger = logging.getLogger(__name__)
923
+
924
+ label_input = {
925
+ "name": name,
926
+ "teamId": team_id,
927
+ "color": color,
928
+ }
929
+
930
+ try:
931
+ result = await self.client.execute_mutation(
932
+ CREATE_LABEL_MUTATION, {"input": label_input}
933
+ )
934
+
935
+ if not result["issueLabelCreate"]["success"]:
936
+ raise ValueError(f"Failed to create label '{name}'")
937
+
938
+ created_label = result["issueLabelCreate"]["issueLabel"]
939
+ label_id = created_label["id"]
940
+
941
+ # Update cache with new label
942
+ if self._labels_cache is not None:
943
+ self._labels_cache.append(created_label)
944
+
945
+ logger.info(f"Created new label '{name}' with ID: {label_id}")
946
+ return label_id
947
+
948
+ except Exception as e:
949
+ logger.error(f"Failed to create label '{name}': {e}")
950
+ raise ValueError(f"Failed to create label '{name}': {e}") from e
951
+
952
+ async def _ensure_labels_exist(self, label_names: list[str]) -> list[str]:
953
+ """Ensure labels exist, creating them if necessary.
954
+
955
+ This method implements the universal label creation flow:
956
+ 1. Load existing labels (if not cached)
957
+ 2. Map each name to existing labels (case-insensitive)
958
+ 3. Create missing labels
959
+ 4. Return list of label IDs
960
+
961
+ Args:
962
+ ----
963
+ label_names: List of label names (strings)
964
+
965
+ Returns:
966
+ -------
967
+ List of Linear label IDs (UUIDs)
968
+
969
+ """
970
+ logger = logging.getLogger(__name__)
971
+
972
+ if not label_names:
973
+ return []
974
+
975
+ # Ensure labels are loaded
976
+ if self._labels_cache is None:
977
+ team_id = await self._ensure_team_id()
978
+ await self._load_team_labels(team_id)
979
+
980
+ if self._labels_cache is None:
981
+ logger.error(
982
+ "Label cache is None after load attempt. Tags will be skipped."
983
+ )
984
+ return []
985
+
986
+ # Get team ID for creating new labels
987
+ team_id = await self._ensure_team_id()
988
+
989
+ # Create name -> ID mapping (case-insensitive)
990
+ label_map = {
991
+ label["name"].lower(): label["id"] for label in (self._labels_cache or [])
992
+ }
993
+
994
+ logger.debug(f"Available labels in team: {list(label_map.keys())}")
995
+
996
+ # Map or create each label
997
+ label_ids = []
998
+ for name in label_names:
999
+ name_lower = name.lower()
1000
+
1001
+ # Check if label already exists (case-insensitive)
1002
+ if name_lower in label_map:
1003
+ # Label exists - use its ID
1004
+ label_id = label_map[name_lower]
1005
+ label_ids.append(label_id)
1006
+ logger.debug(f"Resolved existing label '{name}' to ID: {label_id}")
1007
+ else:
1008
+ # Label doesn't exist - create it
1009
+ try:
1010
+ new_label_id = await self._create_label(name, team_id)
1011
+ label_ids.append(new_label_id)
1012
+ # Update local map for subsequent labels in same call
1013
+ label_map[name_lower] = new_label_id
1014
+ logger.info(f"Created new label '{name}' with ID: {new_label_id}")
1015
+ except Exception as e:
1016
+ # Log error for better visibility (was warning)
1017
+ logger.error(
1018
+ f"Failed to create label '{name}': {e}. "
1019
+ f"This label will be excluded from issue creation."
1020
+ )
1021
+ # Continue processing other labels
1022
+
1023
+ return label_ids
1024
+
1025
+ async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
1026
+ """Resolve label names to Linear label IDs, creating labels if needed.
1027
+
1028
+ This method wraps _ensure_labels_exist for backward compatibility.
1029
+
1030
+ Args:
1031
+ ----
1032
+ label_names: List of label names
1033
+
1034
+ Returns:
1035
+ -------
1036
+ List of Linear label IDs
1037
+
1038
+ """
1039
+ return await self._ensure_labels_exist(label_names)
1040
+
1041
+ def _get_state_mapping(self) -> dict[TicketState, str]:
1042
+ """Get mapping from universal states to Linear workflow state IDs.
1043
+
1044
+ Returns:
1045
+ -------
1046
+ Dictionary mapping TicketState to Linear state ID
1047
+
1048
+ """
1049
+ if not self._workflow_states:
1050
+ # Return type-based mapping if states not loaded
1051
+ return {
1052
+ TicketState.OPEN: "unstarted",
1053
+ TicketState.IN_PROGRESS: "started",
1054
+ TicketState.READY: "unstarted",
1055
+ TicketState.TESTED: "started",
1056
+ TicketState.DONE: "completed",
1057
+ TicketState.CLOSED: "canceled",
1058
+ TicketState.WAITING: "unstarted",
1059
+ TicketState.BLOCKED: "unstarted",
1060
+ }
1061
+
1062
+ # Return ID-based mapping using cached workflow states
1063
+ mapping = {}
1064
+ for universal_state, linear_type in LinearStateMapping.TO_LINEAR.items():
1065
+ if linear_type in self._workflow_states:
1066
+ mapping[universal_state] = self._workflow_states[linear_type]["id"]
1067
+ else:
1068
+ # Fallback to type name
1069
+ mapping[universal_state] = linear_type
1070
+
1071
+ return mapping
1072
+
1073
+ async def _get_user_id(self, user_identifier: str) -> str | None:
1074
+ """Get Linear user ID from email, display name, or user ID.
1075
+
1076
+ Args:
1077
+ ----
1078
+ user_identifier: Email, display name, or user ID
1079
+
1080
+ Returns:
1081
+ -------
1082
+ Linear user ID or None if not found
1083
+
1084
+ """
1085
+ if not user_identifier:
1086
+ return None
1087
+
1088
+ # Try email lookup first (most specific)
1089
+ user = await self.client.get_user_by_email(user_identifier)
1090
+ if user:
1091
+ return user["id"]
1092
+
1093
+ # Try name search (displayName or full name)
1094
+ users = await self.client.get_users_by_name(user_identifier)
1095
+ if users:
1096
+ if len(users) == 1:
1097
+ # Exact match found
1098
+ return users[0]["id"]
1099
+ else:
1100
+ # Multiple matches - try exact match
1101
+ for u in users:
1102
+ if (
1103
+ u.get("displayName", "").lower() == user_identifier.lower()
1104
+ or u.get("name", "").lower() == user_identifier.lower()
1105
+ ):
1106
+ return u["id"]
1107
+
1108
+ # No exact match - log ambiguity and return first
1109
+ logging.getLogger(__name__).warning(
1110
+ f"Multiple users match '{user_identifier}': "
1111
+ f"{[u.get('displayName', u.get('name')) for u in users]}. "
1112
+ f"Using first match: {users[0].get('displayName')}"
1113
+ )
1114
+ return users[0]["id"]
1115
+
1116
+ # Assume it's already a user ID
1117
+ return user_identifier
1118
+
1119
+ # CRUD Operations
1120
+
1121
+ async def create(self, ticket: Epic | Task) -> Epic | Task:
1122
+ """Create a new Linear issue or project with full field support.
1123
+
1124
+ Args:
1125
+ ----
1126
+ ticket: Epic or Task to create
1127
+
1128
+ Returns:
1129
+ -------
1130
+ Created ticket with populated ID and metadata
1131
+
1132
+ Raises:
1133
+ ------
1134
+ ValueError: If credentials are invalid or creation fails
1135
+
1136
+ """
1137
+ # Validate credentials before attempting operation
1138
+ is_valid, error_message = self.validate_credentials()
1139
+ if not is_valid:
1140
+ raise ValueError(error_message)
1141
+
1142
+ # Ensure adapter is initialized
1143
+ await self.initialize()
1144
+
1145
+ # Handle Epic creation (Linear Projects)
1146
+ if isinstance(ticket, Epic):
1147
+ return await self._create_epic(ticket)
1148
+
1149
+ # Handle Task creation (Linear Issues)
1150
+ return await self._create_task(ticket)
1151
+
1152
+ async def _create_task(self, task: Task) -> Task:
1153
+ """Create a Linear issue or sub-issue from a Task.
1154
+
1155
+ Creates a top-level issue when task.parent_issue is not set, or a
1156
+ sub-issue (child of another issue) when task.parent_issue is provided.
1157
+ In Linear terminology:
1158
+ - Issue: Top-level work item (no parent)
1159
+ - Sub-issue: Child work item (has parent issue)
1160
+
1161
+ Args:
1162
+ ----
1163
+ task: Task to create
1164
+
1165
+ Returns:
1166
+ -------
1167
+ Created task with Linear metadata
1168
+
1169
+ """
1170
+ logger = logging.getLogger(__name__)
1171
+ team_id = await self._ensure_team_id()
1172
+
1173
+ # Build issue input using mapper
1174
+ issue_input = build_linear_issue_input(task, team_id)
1175
+
1176
+ # Set default state if not provided
1177
+ # Map OPEN to "unstarted" state (typically "To-Do" in Linear)
1178
+ if task.state == TicketState.OPEN and self._workflow_states:
1179
+ state_mapping = self._get_state_mapping()
1180
+ if TicketState.OPEN in state_mapping:
1181
+ issue_input["stateId"] = state_mapping[TicketState.OPEN]
1182
+
1183
+ # Resolve assignee to user ID if provided
1184
+ # Use configured default user if no assignee specified
1185
+ assignee = task.assignee
1186
+ if not assignee and self.user_email:
1187
+ assignee = self.user_email
1188
+ logger.debug(f"Using default assignee from config: {assignee}")
1189
+
1190
+ if assignee:
1191
+ user_id = await self._get_user_id(assignee)
1192
+ if user_id:
1193
+ issue_input["assigneeId"] = user_id
1194
+
1195
+ # Resolve label names to IDs if provided
1196
+ if task.tags:
1197
+ label_ids = await self._resolve_label_ids(task.tags)
1198
+ if label_ids:
1199
+ issue_input["labelIds"] = label_ids
1200
+ else:
1201
+ # Remove labelIds if no labels resolved
1202
+ issue_input.pop("labelIds", None)
1203
+
1204
+ # Resolve project ID if parent_epic is provided (supports slug, name, short ID, or URL)
1205
+ if task.parent_epic:
1206
+ project_id = await self._resolve_project_id(task.parent_epic)
1207
+ if project_id:
1208
+ # Validate team-project association before assigning
1209
+ is_valid, _ = await self._validate_project_team_association(
1210
+ project_id, team_id
1211
+ )
1212
+
1213
+ if not is_valid:
1214
+ # Attempt to add team to project automatically
1215
+ logging.getLogger(__name__).info(
1216
+ f"Team {team_id} not associated with project {project_id}. "
1217
+ f"Attempting to add team to project..."
1218
+ )
1219
+ success = await self._ensure_team_in_project(project_id, team_id)
1220
+
1221
+ if success:
1222
+ issue_input["projectId"] = project_id
1223
+ logging.getLogger(__name__).info(
1224
+ "Successfully associated team with project. "
1225
+ "Issue will be assigned to project."
1226
+ )
1227
+ else:
1228
+ logging.getLogger(__name__).warning(
1229
+ "Could not associate team with project. "
1230
+ "Issue will be created without project assignment. "
1231
+ "Manual assignment required."
1232
+ )
1233
+ issue_input.pop("projectId", None)
1234
+ else:
1235
+ # Team already associated - safe to assign
1236
+ issue_input["projectId"] = project_id
1237
+ else:
1238
+ # Log warning but don't fail - user may have provided invalid project
1239
+ logging.getLogger(__name__).warning(
1240
+ f"Could not resolve project identifier '{task.parent_epic}' to UUID. "
1241
+ "Issue will be created without project assignment."
1242
+ )
1243
+ # Remove projectId if we couldn't resolve it
1244
+ issue_input.pop("projectId", None)
1245
+
1246
+ # Resolve parent issue ID if provided (creates a sub-issue when parent is set)
1247
+ # Supports identifiers like "ENG-842" or UUIDs
1248
+ if task.parent_issue:
1249
+ issue_id = await self._resolve_issue_id(task.parent_issue)
1250
+ if issue_id:
1251
+ issue_input["parentId"] = issue_id
1252
+ else:
1253
+ # Log warning but don't fail - user may have provided invalid issue
1254
+ logging.getLogger(__name__).warning(
1255
+ f"Could not resolve issue identifier '{task.parent_issue}' to UUID. "
1256
+ "Sub-issue will be created without parent assignment."
1257
+ )
1258
+ # Remove parentId if we couldn't resolve it
1259
+ issue_input.pop("parentId", None)
1260
+
1261
+ # Validate labelIds are proper UUIDs before sending to Linear API
1262
+ # Bug Fix (v1.1.1): This validation prevents "Argument Validation Error"
1263
+ # by ensuring labelIds contains UUIDs (e.g., "uuid-1"), not names (e.g., "bug").
1264
+ # Linear's GraphQL API requires labelIds to be [String!]! (non-null array of
1265
+ # non-null UUID strings). If tag names leak through, we detect and remove them
1266
+ # here to prevent API errors.
1267
+ #
1268
+ # See: docs/TROUBLESHOOTING.md#issue-argument-validation-error-when-creating-issues-with-labels
1269
+ if "labelIds" in issue_input:
1270
+ invalid_labels = []
1271
+ for label_id in issue_input["labelIds"]:
1272
+ # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
1273
+ if not isinstance(label_id, str) or len(label_id) != 36:
1274
+ invalid_labels.append(label_id)
1275
+
1276
+ if invalid_labels:
1277
+ logging.getLogger(__name__).error(
1278
+ f"Invalid label ID format detected: {invalid_labels}. "
1279
+ f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
1280
+ )
1281
+ issue_input.pop("labelIds")
1282
+
1283
+ try:
1284
+ result = await self.client.execute_mutation(
1285
+ CREATE_ISSUE_MUTATION, {"input": issue_input}
1286
+ )
1287
+
1288
+ if not result["issueCreate"]["success"]:
1289
+ item_type = "sub-issue" if task.parent_issue else "issue"
1290
+ raise ValueError(f"Failed to create Linear {item_type}")
1291
+
1292
+ created_issue = result["issueCreate"]["issue"]
1293
+ return map_linear_issue_to_task(created_issue)
1294
+
1295
+ except Exception as e:
1296
+ item_type = "sub-issue" if task.parent_issue else "issue"
1297
+ raise ValueError(f"Failed to create Linear {item_type}: {e}") from e
1298
+
1299
+ async def _create_epic(self, epic: Epic) -> Epic:
1300
+ """Create a Linear project from an Epic.
1301
+
1302
+ Args:
1303
+ ----
1304
+ epic: Epic to create
1305
+
1306
+ Returns:
1307
+ -------
1308
+ Created epic with Linear metadata
1309
+
1310
+ """
1311
+ team_id = await self._ensure_team_id()
1312
+
1313
+ project_input = {
1314
+ "name": epic.title,
1315
+ "teamIds": [team_id],
1316
+ }
1317
+
1318
+ if epic.description:
1319
+ project_input["description"] = epic.description
1320
+
1321
+ # Create project mutation
1322
+ create_query = """
1323
+ mutation CreateProject($input: ProjectCreateInput!) {
1324
+ projectCreate(input: $input) {
1325
+ success
1326
+ project {
1327
+ id
1328
+ name
1329
+ description
1330
+ state
1331
+ createdAt
1332
+ updatedAt
1333
+ url
1334
+ icon
1335
+ color
1336
+ targetDate
1337
+ startedAt
1338
+ completedAt
1339
+ teams {
1340
+ nodes {
1341
+ id
1342
+ name
1343
+ key
1344
+ description
1345
+ }
1346
+ }
1347
+ }
1348
+ }
1349
+ }
1350
+ """
1351
+
1352
+ try:
1353
+ result = await self.client.execute_mutation(
1354
+ create_query, {"input": project_input}
1355
+ )
1356
+
1357
+ if not result["projectCreate"]["success"]:
1358
+ raise ValueError("Failed to create Linear project")
1359
+
1360
+ created_project = result["projectCreate"]["project"]
1361
+ return map_linear_project_to_epic(created_project)
1362
+
1363
+ except Exception as e:
1364
+ raise ValueError(f"Failed to create Linear project: {e}") from e
1365
+
1366
+ async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
1367
+ """Update a Linear project (Epic) with specified fields.
1368
+
1369
+ Args:
1370
+ ----
1371
+ epic_id: Linear project UUID or slug-shortid
1372
+ updates: Dictionary of fields to update. Supported fields:
1373
+ - title: Project name
1374
+ - description: Project description
1375
+ - state: Project state (e.g., "planned", "started", "completed", "canceled")
1376
+ - target_date: Target completion date (ISO format YYYY-MM-DD)
1377
+ - color: Project color
1378
+ - icon: Project icon
1379
+
1380
+ Returns:
1381
+ -------
1382
+ Updated Epic object or None if not found
1383
+
1384
+ Raises:
1385
+ ------
1386
+ ValueError: If update fails or project not found
1387
+
1388
+ """
1389
+ # Validate credentials before attempting operation
1390
+ is_valid, error_message = self.validate_credentials()
1391
+ if not is_valid:
1392
+ raise ValueError(error_message)
1393
+
1394
+ # Resolve project identifier to UUID if needed
1395
+ project_uuid = await self._resolve_project_id(epic_id)
1396
+ if not project_uuid:
1397
+ raise ValueError(f"Project '{epic_id}' not found")
1398
+
1399
+ # Validate field lengths before building update input
1400
+ from mcp_ticketer.core.validators import FieldValidator, ValidationError
1401
+
1402
+ # Build update input from updates dict
1403
+ update_input = {}
1404
+
1405
+ if "title" in updates:
1406
+ try:
1407
+ validated_title = FieldValidator.validate_field(
1408
+ "linear", "epic_name", updates["title"], truncate=False
1409
+ )
1410
+ update_input["name"] = validated_title
1411
+ except ValidationError as e:
1412
+ raise ValueError(str(e)) from e
1413
+
1414
+ if "description" in updates:
1415
+ try:
1416
+ validated_description = FieldValidator.validate_field(
1417
+ "linear", "epic_description", updates["description"], truncate=False
1418
+ )
1419
+ update_input["description"] = validated_description
1420
+ except ValidationError as e:
1421
+ raise ValueError(str(e)) from e
1422
+ if "state" in updates:
1423
+ update_input["state"] = updates["state"]
1424
+ if "target_date" in updates:
1425
+ update_input["targetDate"] = updates["target_date"]
1426
+ if "color" in updates:
1427
+ update_input["color"] = updates["color"]
1428
+ if "icon" in updates:
1429
+ update_input["icon"] = updates["icon"]
1430
+
1431
+ # ProjectUpdate mutation
1432
+ update_query = """
1433
+ mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
1434
+ projectUpdate(id: $id, input: $input) {
1435
+ success
1436
+ project {
1437
+ id
1438
+ name
1439
+ description
1440
+ state
1441
+ createdAt
1442
+ updatedAt
1443
+ url
1444
+ icon
1445
+ color
1446
+ targetDate
1447
+ startedAt
1448
+ completedAt
1449
+ teams {
1450
+ nodes {
1451
+ id
1452
+ name
1453
+ key
1454
+ description
1455
+ }
1456
+ }
1457
+ }
1458
+ }
1459
+ }
1460
+ """
1461
+
1462
+ try:
1463
+ result = await self.client.execute_mutation(
1464
+ update_query, {"id": project_uuid, "input": update_input}
1465
+ )
1466
+
1467
+ if not result["projectUpdate"]["success"]:
1468
+ raise ValueError(f"Failed to update Linear project '{epic_id}'")
1469
+
1470
+ updated_project = result["projectUpdate"]["project"]
1471
+ return map_linear_project_to_epic(updated_project)
1472
+
1473
+ except Exception as e:
1474
+ raise ValueError(f"Failed to update Linear project: {e}") from e
1475
+
1476
+ async def read(self, ticket_id: str) -> Task | Epic | None:
1477
+ """Read a Linear issue OR project by identifier with full details.
1478
+
1479
+ Args:
1480
+ ----
1481
+ ticket_id: Linear issue identifier (e.g., 'BTA-123') or project UUID
1482
+
1483
+ Returns:
1484
+ -------
1485
+ Task with full details if issue found,
1486
+ Epic with full details if project found,
1487
+ None if not found
1488
+
1489
+ Raises:
1490
+ ------
1491
+ ValueError: If ticket_id is a view URL (views are not supported in ticket_read)
1492
+
1493
+ """
1494
+ # Validate credentials before attempting operation
1495
+ is_valid, error_message = self.validate_credentials()
1496
+ if not is_valid:
1497
+ raise ValueError(error_message)
1498
+
1499
+ # Try reading as an issue first (most common case)
1500
+ query = (
1501
+ ALL_FRAGMENTS
1502
+ + """
1503
+ query GetIssue($identifier: String!) {
1504
+ issue(id: $identifier) {
1505
+ ...IssueFullFields
1506
+ }
1507
+ }
1508
+ """
1509
+ )
1510
+
1511
+ try:
1512
+ result = await self.client.execute_query(query, {"identifier": ticket_id})
1513
+
1514
+ if result.get("issue"):
1515
+ return map_linear_issue_to_task(result["issue"])
1516
+
1517
+ except Exception:
1518
+ # Not found as issue, continue to project/view check
1519
+ pass
1520
+
1521
+ # If not found as issue, try reading as project
1522
+ try:
1523
+ project_data = await self.get_project(ticket_id)
1524
+ if project_data:
1525
+ # Fetch project's issues to populate child_issues field
1526
+ issues = await self._get_project_issues(ticket_id)
1527
+
1528
+ # Map to Epic
1529
+ epic = map_linear_project_to_epic(project_data)
1530
+
1531
+ # Populate child_issues with issue IDs
1532
+ epic.child_issues = [issue.id for issue in issues]
1533
+
1534
+ return epic
1535
+ except Exception:
1536
+ # Not found as project either
1537
+ pass
1538
+
1539
+ # If not found as issue or project, check if it's a view URL
1540
+ # Views are collections of issues, not individual tickets
1541
+ logging.debug(
1542
+ f"[VIEW DEBUG] read() checking if ticket_id is a view: {ticket_id}"
1543
+ )
1544
+ try:
1545
+ view_data = await self._get_custom_view(ticket_id)
1546
+ logging.debug(f"[VIEW DEBUG] read() _get_custom_view returned: {view_data}")
1547
+
1548
+ if view_data:
1549
+ logging.debug(
1550
+ "[VIEW DEBUG] read() view_data is truthy, preparing to raise ValueError"
1551
+ )
1552
+ # View found - raise informative error
1553
+ view_name = view_data.get("name", "Unknown")
1554
+ issues_data = view_data.get("issues", {})
1555
+ issue_count = len(issues_data.get("nodes", []))
1556
+ has_more = issues_data.get("pageInfo", {}).get("hasNextPage", False)
1557
+ count_str = f"{issue_count}+" if has_more else str(issue_count)
1558
+
1559
+ logging.debug(
1560
+ f"[VIEW DEBUG] read() raising ValueError with view_name={view_name}, count={count_str}"
1561
+ )
1562
+ raise ValueError(
1563
+ f"Linear view URLs are not supported in ticket_read.\n"
1564
+ f"\n"
1565
+ f"View: '{view_name}' ({ticket_id})\n"
1566
+ f"This view contains {count_str} issues.\n"
1567
+ f"\n"
1568
+ f"Use ticket_list or ticket_search to query issues instead."
1569
+ )
1570
+ else:
1571
+ logging.debug("[VIEW DEBUG] read() view_data is falsy (None or empty)")
1572
+ except ValueError:
1573
+ # Re-raise ValueError (our informative error message)
1574
+ logging.debug("[VIEW DEBUG] read() re-raising ValueError")
1575
+ raise
1576
+ except Exception as e:
1577
+ # View query failed - not a view
1578
+ logging.debug(
1579
+ f"[VIEW DEBUG] read() caught exception in view check: {type(e).__name__}: {str(e)}"
1580
+ )
1581
+ pass
1582
+
1583
+ # Not found as either issue, project, or view
1584
+ logging.debug(
1585
+ "[VIEW DEBUG] read() returning None - not found as issue, project, or view"
1586
+ )
1587
+ return None
1588
+
1589
+ async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
1590
+ """Update a Linear issue with comprehensive field support.
1591
+
1592
+ Args:
1593
+ ----
1594
+ ticket_id: Linear issue identifier
1595
+ updates: Dictionary of fields to update
1596
+
1597
+ Returns:
1598
+ -------
1599
+ Updated task or None if not found
1600
+
1601
+ """
1602
+ # Validate credentials before attempting operation
1603
+ is_valid, error_message = self.validate_credentials()
1604
+ if not is_valid:
1605
+ raise ValueError(error_message)
1606
+
1607
+ # Ensure adapter is initialized (loads workflow states for state transitions)
1608
+ await self.initialize()
1609
+
1610
+ # First get the Linear internal ID
1611
+ id_query = """
1612
+ query GetIssueId($identifier: String!) {
1613
+ issue(id: $identifier) {
1614
+ id
1615
+ }
1616
+ }
1617
+ """
1618
+
1619
+ try:
1620
+ result = await self.client.execute_query(
1621
+ id_query, {"identifier": ticket_id}
1622
+ )
1623
+
1624
+ if not result.get("issue"):
1625
+ return None
1626
+
1627
+ linear_id = result["issue"]["id"]
1628
+
1629
+ # Build update input using mapper
1630
+ update_input = build_linear_issue_update_input(updates)
1631
+
1632
+ # Handle state transitions
1633
+ if "state" in updates:
1634
+ target_state = (
1635
+ TicketState(updates["state"])
1636
+ if isinstance(updates["state"], str)
1637
+ else updates["state"]
1638
+ )
1639
+ state_mapping = self._get_state_mapping()
1640
+ if target_state in state_mapping:
1641
+ update_input["stateId"] = state_mapping[target_state]
1642
+
1643
+ # Resolve assignee to user ID if provided
1644
+ if "assignee" in updates and updates["assignee"]:
1645
+ user_id = await self._get_user_id(updates["assignee"])
1646
+ if user_id:
1647
+ update_input["assigneeId"] = user_id
1648
+
1649
+ # Resolve label names to IDs if provided
1650
+ if "tags" in updates:
1651
+ if updates["tags"]: # Non-empty list
1652
+ label_ids = await self._resolve_label_ids(updates["tags"])
1653
+ if label_ids:
1654
+ update_input["labelIds"] = label_ids
1655
+ else: # Empty list = remove all labels
1656
+ update_input["labelIds"] = []
1657
+
1658
+ # Resolve project ID if parent_epic is provided (supports slug, name, short ID, or URL)
1659
+ if "parent_epic" in updates and updates["parent_epic"]:
1660
+ project_id = await self._resolve_project_id(updates["parent_epic"])
1661
+ if project_id:
1662
+ update_input["projectId"] = project_id
1663
+ else:
1664
+ logging.getLogger(__name__).warning(
1665
+ f"Could not resolve project identifier '{updates['parent_epic']}'"
1666
+ )
1667
+
1668
+ # Validate labelIds are proper UUIDs before sending to Linear API
1669
+ if "labelIds" in update_input and update_input["labelIds"]:
1670
+ invalid_labels = []
1671
+ for label_id in update_input["labelIds"]:
1672
+ # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
1673
+ if not isinstance(label_id, str) or len(label_id) != 36:
1674
+ invalid_labels.append(label_id)
1675
+
1676
+ if invalid_labels:
1677
+ logging.getLogger(__name__).error(
1678
+ f"Invalid label ID format detected in update: {invalid_labels}. "
1679
+ f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
1680
+ )
1681
+ update_input.pop("labelIds")
1682
+
1683
+ # Execute update
1684
+ result = await self.client.execute_mutation(
1685
+ UPDATE_ISSUE_MUTATION, {"id": linear_id, "input": update_input}
1686
+ )
1687
+
1688
+ if not result["issueUpdate"]["success"]:
1689
+ raise ValueError("Failed to update Linear issue")
1690
+
1691
+ updated_issue = result["issueUpdate"]["issue"]
1692
+ return map_linear_issue_to_task(updated_issue)
1693
+
1694
+ except Exception as e:
1695
+ raise ValueError(f"Failed to update Linear issue: {e}") from e
1696
+
1697
+ async def delete(self, ticket_id: str) -> bool:
1698
+ """Delete a Linear issue (archive it).
1699
+
1700
+ Args:
1701
+ ----
1702
+ ticket_id: Linear issue identifier
1703
+
1704
+ Returns:
1705
+ -------
1706
+ True if successfully deleted/archived
1707
+
1708
+ """
1709
+ # Linear doesn't support true deletion, so we archive the issue
1710
+ try:
1711
+ result = await self.update(ticket_id, {"archived": True})
1712
+ return result is not None
1713
+ except Exception:
1714
+ return False
1715
+
1716
+ async def list(
1717
+ self, limit: int = 10, offset: int = 0, filters: dict[str, Any] | None = None
1718
+ ) -> builtins.list[Task]:
1719
+ """List Linear issues with optional filtering.
1720
+
1721
+ Args:
1722
+ ----
1723
+ limit: Maximum number of issues to return
1724
+ offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
1725
+ filters: Optional filters (state, assignee, priority, etc.)
1726
+
1727
+ Returns:
1728
+ -------
1729
+ List of tasks matching the criteria
1730
+
1731
+ """
1732
+ # Validate credentials
1733
+ is_valid, error_message = self.validate_credentials()
1734
+ if not is_valid:
1735
+ raise ValueError(error_message)
1736
+
1737
+ await self.initialize()
1738
+ team_id = await self._ensure_team_id()
1739
+
1740
+ # Build issue filter
1741
+ issue_filter = build_issue_filter(
1742
+ team_id=team_id,
1743
+ state=filters.get("state") if filters else None,
1744
+ priority=filters.get("priority") if filters else None,
1745
+ include_archived=(
1746
+ filters.get("includeArchived", False) if filters else False
1747
+ ),
1748
+ )
1749
+
1750
+ # Add additional filters
1751
+ if filters:
1752
+ if "assignee" in filters:
1753
+ user_id = await self._get_user_id(filters["assignee"])
1754
+ if user_id:
1755
+ issue_filter["assignee"] = {"id": {"eq": user_id}}
1756
+
1757
+ # Support parent_issue filter for listing children (critical for parent state constraints)
1758
+ if "parent_issue" in filters:
1759
+ parent_id = await self._resolve_issue_id(filters["parent_issue"])
1760
+ if parent_id:
1761
+ issue_filter["parent"] = {"id": {"eq": parent_id}}
1762
+
1763
+ if "created_after" in filters:
1764
+ issue_filter["createdAt"] = {"gte": filters["created_after"]}
1765
+ if "updated_after" in filters:
1766
+ issue_filter["updatedAt"] = {"gte": filters["updated_after"]}
1767
+ if "due_before" in filters:
1768
+ issue_filter["dueDate"] = {"lte": filters["due_before"]}
1769
+
1770
+ try:
1771
+ result = await self.client.execute_query(
1772
+ LIST_ISSUES_QUERY, {"filter": issue_filter, "first": limit}
1773
+ )
1774
+
1775
+ tasks = []
1776
+ for issue in result["issues"]["nodes"]:
1777
+ tasks.append(map_linear_issue_to_task(issue))
1778
+
1779
+ return tasks
1780
+
1781
+ except Exception as e:
1782
+ raise ValueError(f"Failed to list Linear issues: {e}") from e
1783
+
1784
async def search(self, query: SearchQuery) -> builtins.list[Task]:
    """Search Linear issues using comprehensive filters.

    Args:
    ----
        query: Search query with filters and criteria

    Returns:
    -------
        List of tasks matching the search criteria

    """
    # Fail fast when the adapter has no usable credentials.
    valid, problem = self.validate_credentials()
    if not valid:
        raise ValueError(problem)

    await self.initialize()

    # Every search is scoped to the configured team.
    criteria: dict[str, Any] = {
        "team": {"id": {"eq": await self._ensure_team_id()}}
    }

    # Text search: approximated with a case-insensitive title match.
    # Linear's dedicated search endpoint would be a richer alternative.
    if query.query:
        criteria["title"] = {"containsIgnoreCase": query.query}

    # State filter. OPEN maps to two Linear state types ("unstarted" and
    # "backlog"), so it needs an "in" filter; every other TicketState maps
    # to exactly one Linear state type.
    if query.state:
        if query.state == TicketState.OPEN:
            criteria["state"] = {"type": {"in": ["unstarted", "backlog"]}}
        else:
            criteria["state"] = {
                "type": {"eq": get_linear_state_type(query.state)}
            }

    if query.priority:
        criteria["priority"] = {"eq": get_linear_priority(query.priority)}

    # Assignee filter: only applied when the user can be resolved.
    if query.assignee:
        assignee_id = await self._get_user_id(query.assignee)
        if assignee_id:
            criteria["assignee"] = {"id": {"eq": assignee_id}}

    # Project (epic) filter: accepts an ID, a name, or a URL.
    if query.project:
        resolved_project = await self._resolve_project_id(query.project)
        if resolved_project:
            criteria["project"] = {"id": {"eq": resolved_project}}

    # Tags correspond to Linear labels.
    if query.tags:
        criteria["labels"] = {"some": {"name": {"in": query.tags}}}

    # Archived issues are excluded by default.
    criteria["archivedAt"] = {"null": True}

    try:
        response = await self.client.execute_query(
            SEARCH_ISSUES_QUERY, {"filter": criteria, "first": query.limit}
        )
        return [
            map_linear_issue_to_task(node)
            for node in response["issues"]["nodes"]
        ]
    except Exception as e:
        raise ValueError(f"Failed to search Linear issues: {e}") from e
1862
+
1863
async def transition_state(
    self, ticket_id: str, target_state: TicketState
) -> Task | None:
    """Transition Linear issue to new state with workflow validation.

    Args:
    ----
        ticket_id: Linear issue identifier
        target_state: Target state to transition to

    Returns:
    -------
        Updated task or None if transition failed

    """
    # Refuse disallowed transitions up front; the caller gets None rather
    # than an exception for an invalid workflow move.
    allowed = await self.validate_transition(ticket_id, target_state)
    if not allowed:
        return None

    # Delegate the actual write to update(), which handles state mapping.
    return await self.update(ticket_id, {"state": target_state})
1884
+
1885
async def validate_transition(
    self, ticket_id: str, target_state: TicketState
) -> bool:
    """Validate whether a state transition is allowed.

    Delegates entirely to BaseAdapter, which enforces:
    1. Workflow state machine transitions (OPEN → IN_PROGRESS → READY → ...)
    2. Parent/child state constraints (a parent must keep a completion
       level >= the maximum of its children, per the 1M-93 requirement)

    The shared implementation lives in core/adapter.py (lines 312-370).

    Args:
    ----
        ticket_id: Linear issue identifier
        target_state: Target state to validate

    Returns:
    -------
        True if transition is valid, False otherwise

    """
    # All validation logic is inherited; nothing Linear-specific here.
    outcome = await super().validate_transition(ticket_id, target_state)
    return outcome
1910
+
1911
async def add_comment(self, comment: Comment) -> Comment:
    """Add a comment to a Linear issue.

    Args:
    ----
        comment: Comment to add

    Returns:
    -------
        Created comment with ID

    """
    # Linear mutations need the internal UUID, so resolve the
    # human-readable identifier (e.g. "ENG-42") first.
    lookup_query = """
        query GetIssueId($identifier: String!) {
            issue(id: $identifier) {
                id
            }
        }
    """

    create_comment_mutation = """
        mutation CreateComment($input: CommentCreateInput!) {
            commentCreate(input: $input) {
                success
                comment {
                    id
                    body
                    createdAt
                    updatedAt
                    user {
                        id
                        name
                        email
                        displayName
                    }
                }
            }
        }
    """

    try:
        lookup = await self.client.execute_query(
            lookup_query, {"identifier": comment.ticket_id}
        )
        if not lookup.get("issue"):
            raise ValueError(f"Issue {comment.ticket_id} not found")

        payload = {
            "issueId": lookup["issue"]["id"],
            "body": comment.content,
        }
        outcome = await self.client.execute_mutation(
            create_comment_mutation, {"input": payload}
        )

        if not outcome["commentCreate"]["success"]:
            raise ValueError("Failed to create comment")

        return map_linear_comment_to_comment(
            outcome["commentCreate"]["comment"], comment.ticket_id
        )

    except Exception as e:
        # Any failure (lookup, mutation, mapping) surfaces uniformly.
        raise ValueError(f"Failed to add comment: {e}") from e
1980
+
1981
async def get_comments(
    self, ticket_id: str, limit: int = 10, offset: int = 0
) -> builtins.list[Comment]:
    """Get comments for a Linear issue.

    Args:
    ----
        ticket_id: Linear issue identifier
        limit: Maximum number of comments to return
        offset: Number of comments to skip

    Returns:
    -------
        List of comments for the issue (empty on lookup failure)

    """
    query = """
        query GetIssueComments($identifier: String!, $first: Int!) {
            issue(id: $identifier) {
                comments(first: $first) {
                    nodes {
                        id
                        body
                        createdAt
                        updatedAt
                        user {
                            id
                            name
                            email
                            displayName
                            avatarUrl
                        }
                        parent {
                            id
                        }
                    }
                }
            }
        }
    """

    try:
        # Bug fix: the documented ``offset`` parameter was previously
        # ignored (only ``first: limit`` was sent). Linear's comments
        # connection is cursor-based, so over-fetch by the offset and
        # apply the skip locally.
        result = await self.client.execute_query(
            query, {"identifier": ticket_id, "first": limit + offset}
        )

        if not result.get("issue"):
            return []

        nodes = result["issue"]["comments"]["nodes"]
        return [
            map_linear_comment_to_comment(node, ticket_id)
            for node in nodes[offset : offset + limit]
        ]

    except Exception:
        # Best-effort API: comment retrieval failures degrade to an
        # empty list rather than raising (matches original behavior).
        return []
2038
+
2039
async def list_labels(self) -> builtins.list[dict[str, Any]]:
    """List all labels available in the Linear team.

    Returns:
    -------
        List of label dictionaries with 'id', 'name', and 'color' fields

    """
    # Lazily populate the label cache on first use.
    if self._labels_cache is None:
        await self._load_team_labels(await self._ensure_team_id())

    # A missing or empty cache means there is nothing to report.
    if not self._labels_cache:
        return []

    # Normalize cached entries to the adapter-wide label shape; color is
    # optional in Linear's payload, so default it to an empty string.
    normalized = []
    for entry in self._labels_cache:
        normalized.append(
            {
                "id": entry["id"],
                "name": entry["name"],
                "color": entry.get("color", ""),
            }
        )
    return normalized
2065
+
2066
async def upload_file(self, file_path: str, mime_type: str | None = None) -> str:
    """Upload a file to Linear's storage and return the asset URL.

    This method implements Linear's three-step file upload process:
    1. Request a pre-signed upload URL via fileUpload mutation
    2. Upload the file to S3 using the pre-signed URL
    3. Return the asset URL for use in attachments

    Args:
    ----
        file_path: Path to the file to upload
        mime_type: MIME type of the file. If None, will be auto-detected.

    Returns:
    -------
        Asset URL that can be used with attachmentCreate mutation

    Raises:
    ------
        ValueError: If the path is not a file, upload fails, or httpx not available
        FileNotFoundError: If the specified file doesn't exist

    """
    if httpx is None:
        raise ValueError(
            "httpx library not installed. Install with: pip install httpx"
        )

    # Validate file exists and is a regular file.
    file_path_obj = Path(file_path)
    if not file_path_obj.exists():
        raise FileNotFoundError(f"File not found: {file_path}")
    if not file_path_obj.is_file():
        raise ValueError(f"Path is not a file: {file_path}")

    file_size = file_path_obj.stat().st_size
    filename = file_path_obj.name

    # Auto-detect MIME type if not provided; fall back to generic binary.
    if mime_type is None:
        mime_type, _ = mimetypes.guess_type(file_path)
        if mime_type is None:
            mime_type = "application/octet-stream"

    # Step 1: Request pre-signed upload URL
    upload_mutation = """
        mutation FileUpload($contentType: String!, $filename: String!, $size: Int!) {
            fileUpload(contentType: $contentType, filename: $filename, size: $size) {
                success
                uploadFile {
                    uploadUrl
                    assetUrl
                    headers {
                        key
                        value
                    }
                }
            }
        }
    """

    try:
        result = await self.client.execute_mutation(
            upload_mutation,
            {
                "contentType": mime_type,
                "filename": filename,
                "size": file_size,
            },
        )

        if not result["fileUpload"]["success"]:
            raise ValueError("Failed to get upload URL from Linear API")

        upload_file_data = result["fileUpload"]["uploadFile"]
        upload_url = upload_file_data["uploadUrl"]
        asset_url = upload_file_data["assetUrl"]
        headers_list = upload_file_data.get("headers", [])

        # Convert the key/value header list to a dict and ensure the
        # Content-Type matches what the pre-signed URL was issued for.
        upload_headers = {h["key"]: h["value"] for h in headers_list}
        upload_headers["Content-Type"] = mime_type

        # Step 2: Upload file to S3 using pre-signed URL
        async with httpx.AsyncClient() as http_client:
            with open(file_path, "rb") as f:
                file_content = f.read()

            response = await http_client.put(
                upload_url,
                content=file_content,
                headers=upload_headers,
                timeout=60.0,  # 60 second timeout for large files
            )

            if response.status_code not in (200, 201, 204):
                raise ValueError(
                    f"Failed to upload file to S3. Status: {response.status_code}, "
                    f"Response: {response.text}"
                )

        # Step 3: Return asset URL
        # Bug fix: log/report the actual file path; previously both
        # messages contained the hard-coded placeholder "(unknown)".
        logging.getLogger(__name__).info(
            f"Successfully uploaded file '{file_path}' ({file_size} bytes) to Linear"
        )
        return asset_url

    except Exception as e:
        raise ValueError(f"Failed to upload file '{file_path}': {e}") from e
2178
+
2179
async def attach_file_to_issue(
    self,
    issue_id: str,
    file_url: str,
    title: str,
    subtitle: str | None = None,
    comment_body: str | None = None,
) -> dict[str, Any]:
    """Attach a file to a Linear issue.

    The file must already be uploaded using upload_file() or be a publicly
    accessible URL.

    Args:
    ----
        issue_id: Linear issue identifier (e.g., "ENG-842") or UUID
        file_url: URL of the file (from upload_file() or external URL)
        title: Title for the attachment
        subtitle: Optional subtitle for the attachment
        comment_body: Optional comment text to include with the attachment

    Returns:
    -------
        Dictionary with attachment details including id, title, url, etc.

    Raises:
    ------
        ValueError: If attachment creation fails or issue not found

    """
    # Identifiers like "ENG-842" must be resolved to Linear's internal UUID.
    issue_uuid = await self._resolve_issue_id(issue_id)
    if not issue_uuid:
        raise ValueError(f"Issue '{issue_id}' not found")

    attachment_input: dict[str, Any] = {
        "issueId": issue_uuid,
        "title": title,
        "url": file_url,
    }
    # Optional fields are only included when provided.
    if subtitle:
        attachment_input["subtitle"] = subtitle
    if comment_body:
        attachment_input["commentBody"] = comment_body

    attachment_mutation = """
        mutation AttachmentCreate($input: AttachmentCreateInput!) {
            attachmentCreate(input: $input) {
                success
                attachment {
                    id
                    title
                    url
                    subtitle
                    metadata
                    createdAt
                    updatedAt
                }
            }
        }
    """

    try:
        response = await self.client.execute_mutation(
            attachment_mutation, {"input": attachment_input}
        )

        if not response["attachmentCreate"]["success"]:
            raise ValueError(f"Failed to attach file to issue '{issue_id}'")

        created = response["attachmentCreate"]["attachment"]
        logging.getLogger(__name__).info(
            f"Successfully attached file '{title}' to issue '{issue_id}'"
        )
        return created

    except Exception as e:
        raise ValueError(f"Failed to attach file to issue '{issue_id}': {e}") from e
2261
+
2262
async def attach_file_to_epic(
    self,
    epic_id: str,
    file_url: str,
    title: str,
    subtitle: str | None = None,
) -> dict[str, Any]:
    """Attach a file to a Linear project (Epic).

    The file must already be uploaded using upload_file() or be a publicly
    accessible URL.

    Args:
    ----
        epic_id: Linear project UUID or slug-shortid
        file_url: URL of the file (from upload_file() or external URL)
        title: Title for the attachment
        subtitle: Optional subtitle for the attachment

    Returns:
    -------
        Dictionary with attachment details including id, title, url, etc.

    Raises:
    ------
        ValueError: If attachment creation fails or project not found

    """
    # Resolve the slug/short-id form to the project's UUID.
    project_uuid = await self._resolve_project_id(epic_id)
    if not project_uuid:
        raise ValueError(f"Project '{epic_id}' not found")

    # Same mutation as for issues, but keyed by projectId instead of issueId.
    attachment_input: dict[str, Any] = {
        "projectId": project_uuid,
        "title": title,
        "url": file_url,
    }
    if subtitle:
        attachment_input["subtitle"] = subtitle

    attachment_mutation = """
        mutation AttachmentCreate($input: AttachmentCreateInput!) {
            attachmentCreate(input: $input) {
                success
                attachment {
                    id
                    title
                    url
                    subtitle
                    metadata
                    createdAt
                    updatedAt
                }
            }
        }
    """

    try:
        response = await self.client.execute_mutation(
            attachment_mutation, {"input": attachment_input}
        )

        if not response["attachmentCreate"]["success"]:
            raise ValueError(f"Failed to attach file to project '{epic_id}'")

        created = response["attachmentCreate"]["attachment"]
        logging.getLogger(__name__).info(
            f"Successfully attached file '{title}' to project '{epic_id}'"
        )
        return created

    except Exception as e:
        raise ValueError(
            f"Failed to attach file to project '{epic_id}': {e}"
        ) from e
2341
+
2342
async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
    """Get all attachments for a Linear issue or project.

    Retrieves attachment metadata from Linear's GraphQL API. Note that
    Linear attachment URLs require authentication to access.

    Args:
    ----
        ticket_id: Linear issue identifier (e.g., "ENG-842") or project UUID

    Returns:
    -------
        List of Attachment objects with metadata (empty when not found)

    Raises:
    ------
        ValueError: If credentials are invalid

    Authentication Note:
    -------------------
        Attachment URLs (https://files.linear.app/workspace/attachment-id/filename)
        need an ``Authorization: Bearer {api_key}`` header; direct access
        without authentication returns 401 Unauthorized.

    """
    logger = logging.getLogger(__name__)

    valid, problem = self.validate_credentials()
    if not valid:
        raise ValueError(problem)

    # Most callers pass issue identifiers, so try the issue path first.
    issue_uuid = await self._resolve_issue_id(ticket_id)
    if issue_uuid:
        issue_query = """
            query GetIssueAttachments($issueId: String!) {
                issue(id: $issueId) {
                    id
                    identifier
                    attachments {
                        nodes {
                            id
                            title
                            url
                            subtitle
                            metadata
                            createdAt
                            updatedAt
                        }
                    }
                }
            }
        """
        try:
            data = await self.client.execute_query(
                issue_query, {"issueId": issue_uuid}
            )
            issue = data.get("issue")
            if not issue:
                logger.warning(f"Issue {ticket_id} not found")
                return []

            nodes = issue.get("attachments", {}).get("nodes", [])
            # Keep the caller's identifier (not the UUID) on each Attachment.
            return [
                map_linear_attachment_to_attachment(node, ticket_id)
                for node in nodes
            ]
        except Exception as e:
            logger.error(f"Failed to get attachments for issue {ticket_id}: {e}")
            return []

    # Fall back to treating the identifier as a project (epic).
    project_uuid = await self._resolve_project_id(ticket_id)
    if project_uuid:
        project_query = """
            query GetProjectAttachments($projectId: String!) {
                project(id: $projectId) {
                    id
                    name
                    documents {
                        nodes {
                            id
                            title
                            url
                            createdAt
                            updatedAt
                        }
                    }
                }
            }
        """
        try:
            data = await self.client.execute_query(
                project_query, {"projectId": project_uuid}
            )
            project = data.get("project")
            if not project:
                logger.warning(f"Project {ticket_id} not found")
                return []

            docs = project.get("documents", {}).get("nodes", [])
            # Projects expose documents rather than attachments proper.
            return [
                map_linear_attachment_to_attachment(doc, ticket_id)
                for doc in docs
            ]
        except Exception as e:
            logger.error(f"Failed to get attachments for project {ticket_id}: {e}")
            return []

    # Neither lookup succeeded.
    logger.warning(f"Ticket {ticket_id} not found as issue or project")
    return []
2469
+
2470
async def list_cycles(
    self, team_id: str | None = None, limit: int = 50
) -> builtins.list[dict[str, Any]]:
    """List Linear Cycles (Sprints) for the team.

    Args:
    ----
        team_id: Linear team UUID. If None, uses the configured team.
        limit: Maximum number of cycles to return (default: 50)

    Returns:
    -------
        List of cycle dictionaries with fields:
        - id: Cycle UUID
        - name: Cycle name
        - number: Cycle number
        - startsAt: Start date (ISO format)
        - endsAt: End date (ISO format)
        - completedAt: Completion date (ISO format, None if not completed)
        - progress: Progress percentage (0-1)

    Raises:
    ------
        ValueError: If credentials are invalid or query fails

    """
    # Validate credentials
    is_valid, error_message = self.validate_credentials()
    if not is_valid:
        raise ValueError(error_message)

    await self.initialize()

    # Use configured team if not specified
    if team_id is None:
        team_id = await self._ensure_team_id()

    try:
        # Fetch cycles with cursor-based pagination until limit is reached.
        all_cycles: list[dict[str, Any]] = []
        has_next_page = True
        after_cursor = None

        while has_next_page and len(all_cycles) < limit:
            remaining = limit - len(all_cycles)
            page_size = min(remaining, 50)  # Linear max page size is typically 50

            variables: dict[str, Any] = {"teamId": team_id, "first": page_size}
            if after_cursor:
                variables["after"] = after_cursor

            result = await self.client.execute_query(LIST_CYCLES_QUERY, variables)

            cycles_data = result.get("team", {}).get("cycles", {})
            page_cycles = cycles_data.get("nodes", [])
            page_info = cycles_data.get("pageInfo", {})

            all_cycles.extend(page_cycles)
            has_next_page = page_info.get("hasNextPage", False)
            after_cursor = page_info.get("endCursor")

            # Bug fix: stop on an empty page (mirrors list_epics). Without
            # this guard, a response reporting hasNextPage=True with zero
            # nodes would spin this loop forever.
            if not page_cycles:
                break

        return all_cycles[:limit]  # Ensure we don't exceed limit

    except Exception as e:
        raise ValueError(f"Failed to list Linear cycles: {e}") from e
2536
+
2537
async def get_issue_status(self, issue_id: str) -> dict[str, Any] | None:
    """Get rich issue status information for a Linear issue.

    Args:
    ----
        issue_id: Linear issue identifier (e.g., 'BTA-123') or UUID

    Returns:
    -------
        Dictionary describing the workflow state (id, name, type, color,
        description, position), or None if the issue was not found.

    Raises:
    ------
        ValueError: If credentials are invalid or query fails

    """
    valid, problem = self.validate_credentials()
    if not valid:
        raise ValueError(problem)

    await self.initialize()

    # Identifiers like "BTA-123" must first be resolved to a UUID.
    issue_uuid = await self._resolve_issue_id(issue_id)
    if not issue_uuid:
        return None

    try:
        payload = await self.client.execute_query(
            GET_ISSUE_STATUS_QUERY, {"issueId": issue_uuid}
        )
        issue = payload.get("issue")
        # An unresolvable issue yields None rather than an error.
        return issue.get("state") if issue else None
    except Exception as e:
        raise ValueError(f"Failed to get issue status for '{issue_id}': {e}") from e
2585
+
2586
async def list_issue_statuses(
    self, team_id: str | None = None
) -> builtins.list[dict[str, Any]]:
    """List all workflow states for the team.

    Args:
    ----
        team_id: Linear team UUID. If None, uses the configured team.

    Returns:
    -------
        List of workflow state dictionaries, ordered by position
        (lower = earlier in the workflow). Each has:
        - id: State UUID
        - name: State name (e.g., "Backlog", "In Progress", "Done")
        - type: State type ("backlog", "unstarted", "started", "completed", "canceled")
        - color: State color (hex format)
        - description: State description
        - position: Position in workflow

    Raises:
    ------
        ValueError: If credentials are invalid or query fails

    """
    valid, problem = self.validate_credentials()
    if not valid:
        raise ValueError(problem)

    await self.initialize()

    # Fall back to the configured team when none is given.
    if team_id is None:
        team_id = await self._ensure_team_id()

    try:
        payload = await self.client.execute_query(
            LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
        )
        workflow_states = (
            payload.get("team", {}).get("states", {}).get("nodes", [])
        )
        # Present states in workflow order.
        return sorted(workflow_states, key=lambda s: s.get("position", 0))
    except Exception as e:
        raise ValueError(f"Failed to list workflow states: {e}") from e
2636
+
2637
async def list_epics(
    self,
    limit: int = 50,
    offset: int = 0,
    state: str | None = None,
    include_completed: bool = True,
    **kwargs: Any,
) -> builtins.list[Epic]:
    """List Linear projects (epics) with efficient pagination.

    Args:
    ----
        limit: Maximum number of projects to return (default: 50)
        offset: Number of projects to skip (note: Linear uses cursor-based pagination)
        state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
        include_completed: Whether to include completed projects (default: True)
        **kwargs: Additional filter parameters (reserved for future use)

    Returns:
    -------
        List of Epic objects mapped from Linear projects

    Raises:
    ------
        ValueError: If credentials are invalid or query fails

    """
    valid, problem = self.validate_credentials()
    if not valid:
        raise ValueError(problem)

    await self.initialize()
    team_id = await self._ensure_team_id()

    # Build project filter using existing helper
    from .types import build_project_filter

    project_filter = build_project_filter(
        state=state,
        team_id=team_id,
        include_completed=include_completed,
    )

    try:
        # Linear paginates by cursor, so over-fetch limit + offset items
        # and apply the offset locally afterwards.
        fetched: list[dict[str, Any]] = []
        cursor = None
        more = True
        target = limit + offset

        while more and len(fetched) < target:
            page_size = min(target - len(fetched), 50)  # Linear caps pages ~50

            variables = {"filter": project_filter, "first": page_size}
            if cursor:
                variables["after"] = cursor

            result = await self.client.execute_query(LIST_PROJECTS_QUERY, variables)

            connection = result.get("projects", {})
            nodes = connection.get("nodes", [])
            info = connection.get("pageInfo", {})

            fetched.extend(nodes)
            more = info.get("hasNextPage", False)
            cursor = info.get("endCursor")

            # Defensive: an empty page means no further progress is possible.
            if not nodes:
                break

        # Apply offset and limit, then map to Epic objects.
        window = fetched[offset : offset + limit]
        return [map_linear_project_to_epic(project) for project in window]

    except Exception as e:
        raise ValueError(f"Failed to list Linear projects: {e}") from e
2725
+
2726
async def close(self) -> None:
    """Close the adapter and release the underlying GraphQL client."""
    await self.client.close()
2729
+
2730
+
2731
# Register this adapter under the "linear" key so it can be constructed
# by name through the AdapterRegistry factory.
AdapterRegistry.register("linear", LinearAdapter)