mcp-ticketer 0.12.0__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcp-ticketer might be problematic.

Files changed (87)
  1. mcp_ticketer/__init__.py +10 -10
  2. mcp_ticketer/__version__.py +1 -1
  3. mcp_ticketer/adapters/aitrackdown.py +385 -6
  4. mcp_ticketer/adapters/asana/adapter.py +108 -0
  5. mcp_ticketer/adapters/asana/mappers.py +14 -0
  6. mcp_ticketer/adapters/github.py +525 -11
  7. mcp_ticketer/adapters/hybrid.py +47 -5
  8. mcp_ticketer/adapters/jira.py +521 -0
  9. mcp_ticketer/adapters/linear/adapter.py +1784 -101
  10. mcp_ticketer/adapters/linear/client.py +85 -3
  11. mcp_ticketer/adapters/linear/mappers.py +96 -8
  12. mcp_ticketer/adapters/linear/queries.py +168 -1
  13. mcp_ticketer/adapters/linear/types.py +80 -4
  14. mcp_ticketer/analysis/__init__.py +56 -0
  15. mcp_ticketer/analysis/dependency_graph.py +255 -0
  16. mcp_ticketer/analysis/health_assessment.py +304 -0
  17. mcp_ticketer/analysis/orphaned.py +218 -0
  18. mcp_ticketer/analysis/project_status.py +594 -0
  19. mcp_ticketer/analysis/similarity.py +224 -0
  20. mcp_ticketer/analysis/staleness.py +266 -0
  21. mcp_ticketer/automation/__init__.py +11 -0
  22. mcp_ticketer/automation/project_updates.py +378 -0
  23. mcp_ticketer/cli/adapter_diagnostics.py +3 -1
  24. mcp_ticketer/cli/auggie_configure.py +17 -5
  25. mcp_ticketer/cli/codex_configure.py +97 -61
  26. mcp_ticketer/cli/configure.py +851 -103
  27. mcp_ticketer/cli/cursor_configure.py +314 -0
  28. mcp_ticketer/cli/diagnostics.py +13 -12
  29. mcp_ticketer/cli/discover.py +5 -0
  30. mcp_ticketer/cli/gemini_configure.py +17 -5
  31. mcp_ticketer/cli/init_command.py +880 -0
  32. mcp_ticketer/cli/instruction_commands.py +6 -0
  33. mcp_ticketer/cli/main.py +233 -3151
  34. mcp_ticketer/cli/mcp_configure.py +672 -98
  35. mcp_ticketer/cli/mcp_server_commands.py +415 -0
  36. mcp_ticketer/cli/platform_detection.py +77 -12
  37. mcp_ticketer/cli/platform_installer.py +536 -0
  38. mcp_ticketer/cli/project_update_commands.py +350 -0
  39. mcp_ticketer/cli/setup_command.py +639 -0
  40. mcp_ticketer/cli/simple_health.py +12 -10
  41. mcp_ticketer/cli/ticket_commands.py +264 -24
  42. mcp_ticketer/core/__init__.py +28 -6
  43. mcp_ticketer/core/adapter.py +166 -1
  44. mcp_ticketer/core/config.py +21 -21
  45. mcp_ticketer/core/exceptions.py +7 -1
  46. mcp_ticketer/core/label_manager.py +732 -0
  47. mcp_ticketer/core/mappers.py +31 -19
  48. mcp_ticketer/core/models.py +135 -0
  49. mcp_ticketer/core/onepassword_secrets.py +1 -1
  50. mcp_ticketer/core/priority_matcher.py +463 -0
  51. mcp_ticketer/core/project_config.py +132 -14
  52. mcp_ticketer/core/session_state.py +171 -0
  53. mcp_ticketer/core/state_matcher.py +592 -0
  54. mcp_ticketer/core/url_parser.py +425 -0
  55. mcp_ticketer/core/validators.py +69 -0
  56. mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
  57. mcp_ticketer/mcp/server/main.py +106 -25
  58. mcp_ticketer/mcp/server/routing.py +655 -0
  59. mcp_ticketer/mcp/server/server_sdk.py +58 -0
  60. mcp_ticketer/mcp/server/tools/__init__.py +31 -12
  61. mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
  62. mcp_ticketer/mcp/server/tools/attachment_tools.py +6 -8
  63. mcp_ticketer/mcp/server/tools/bulk_tools.py +259 -202
  64. mcp_ticketer/mcp/server/tools/comment_tools.py +74 -12
  65. mcp_ticketer/mcp/server/tools/config_tools.py +1184 -136
  66. mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
  67. mcp_ticketer/mcp/server/tools/hierarchy_tools.py +870 -460
  68. mcp_ticketer/mcp/server/tools/instruction_tools.py +7 -5
  69. mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
  70. mcp_ticketer/mcp/server/tools/pr_tools.py +3 -7
  71. mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
  72. mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
  73. mcp_ticketer/mcp/server/tools/search_tools.py +180 -97
  74. mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
  75. mcp_ticketer/mcp/server/tools/ticket_tools.py +1070 -123
  76. mcp_ticketer/mcp/server/tools/user_ticket_tools.py +218 -236
  77. mcp_ticketer/queue/worker.py +1 -1
  78. mcp_ticketer/utils/__init__.py +5 -0
  79. mcp_ticketer/utils/token_utils.py +246 -0
  80. mcp_ticketer-2.0.1.dist-info/METADATA +1366 -0
  81. mcp_ticketer-2.0.1.dist-info/RECORD +122 -0
  82. mcp_ticketer-0.12.0.dist-info/METADATA +0 -550
  83. mcp_ticketer-0.12.0.dist-info/RECORD +0 -91
  84. {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.0.1.dist-info}/WHEEL +0 -0
  85. {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.0.1.dist-info}/entry_points.txt +0 -0
  86. {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.0.1.dist-info}/licenses/LICENSE +0 -0
  87. {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.0.1.dist-info}/top_level.txt +0 -0
mcp_ticketer/adapters/linear/adapter.py
@@ -6,6 +6,7 @@ import asyncio
  import logging
  import mimetypes
  import os
+ from datetime import datetime
  from pathlib import Path
  from typing import Any

@@ -21,12 +22,23 @@ except ImportError:
  import builtins

  from ...core.adapter import BaseAdapter
- from ...core.models import Comment, Epic, SearchQuery, Task, TicketState
+ from ...core.models import (
+ Attachment,
+ Comment,
+ Epic,
+ ProjectUpdate,
+ ProjectUpdateHealth,
+ SearchQuery,
+ Task,
+ TicketState,
+ )
  from ...core.registry import AdapterRegistry
+ from ...core.url_parser import URLParserError, normalize_project_id
  from .client import LinearGraphQLClient
  from .mappers import (
  build_linear_issue_input,
  build_linear_issue_update_input,
+ map_linear_attachment_to_attachment,
  map_linear_comment_to_comment,
  map_linear_issue_to_task,
  map_linear_project_to_epic,
@@ -34,7 +46,16 @@ from .mappers import (
  from .queries import (
  ALL_FRAGMENTS,
  CREATE_ISSUE_MUTATION,
+ CREATE_LABEL_MUTATION,
+ CREATE_PROJECT_UPDATE_MUTATION,
+ GET_CUSTOM_VIEW_QUERY,
+ GET_ISSUE_STATUS_QUERY,
+ GET_PROJECT_UPDATE_QUERY,
+ LIST_CYCLES_QUERY,
+ LIST_ISSUE_STATUSES_QUERY,
  LIST_ISSUES_QUERY,
+ LIST_PROJECT_UPDATES_QUERY,
+ LIST_PROJECTS_QUERY,
  SEARCH_ISSUES_QUERY,
  UPDATE_ISSUE_MUTATION,
  WORKFLOW_STATES_QUERY,
@@ -70,6 +91,7 @@ class LinearAdapter(BaseAdapter[Task]):
  """Initialize Linear adapter.

  Args:
+ ----
  config: Configuration with:
  - api_key: Linear API key (or LINEAR_API_KEY env var)
  - workspace: Linear workspace name (optional, for documentation)
@@ -78,6 +100,7 @@ class LinearAdapter(BaseAdapter[Task]):
  - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)

  Raises:
+ ------
  ValueError: If required configuration is missing

  """
@@ -124,6 +147,7 @@ class LinearAdapter(BaseAdapter[Task]):
  self.workspace = config.get("workspace", "")
  self.team_key = config.get("team_key")
  self.team_id = config.get("team_id")
+ self.user_email = config.get("user_email") # Optional default assignee
  self.api_url = config.get("api_url", "https://api.linear.app/graphql")

  # Validate team configuration
@@ -137,6 +161,7 @@ class LinearAdapter(BaseAdapter[Task]):
  """Validate Linear API credentials.

  Returns:
+ -------
  Tuple of (is_valid, error_message)

  """
@@ -149,16 +174,49 @@ class LinearAdapter(BaseAdapter[Task]):
  return True, ""

  async def initialize(self) -> None:
- """Initialize adapter by preloading team, states, and labels data concurrently."""
+ """Initialize adapter by preloading team, states, and labels data concurrently.
+
+ Design Decision: Enhanced Error Handling (1M-431)
+ --------------------------------------------------
+ Improved error messages to provide actionable troubleshooting guidance.
+ Added logging to track initialization progress and identify failure points.
+ Preserves original ValueError type for backward compatibility.
+
+ Raises:
+ ------
+ ValueError: If connection fails or initialization encounters errors
+ with detailed troubleshooting information
+
+ """
  if self._initialized:
  return

+ import logging
+
+ logger = logging.getLogger(__name__)
+
  try:
  # Test connection first
- if not await self.client.test_connection():
- raise ValueError("Failed to connect to Linear API - check credentials")
+ logger.info(
+ f"Testing Linear API connection for team {self.team_key or self.team_id}..."
+ )
+ connection_ok = await self.client.test_connection()
+
+ if not connection_ok:
+ raise ValueError(
+ "Failed to connect to Linear API. Troubleshooting:\n"
+ "1. Verify API key is valid (starts with 'lin_api_')\n"
+ "2. Check team_key matches your Linear workspace\n"
+ "3. Ensure API key has proper permissions\n"
+ "4. Review logs for detailed error information\n"
+ f" API key preview: {self.api_key[:20] if self.api_key else 'None'}...\n"
+ f" Team: {self.team_key or self.team_id}"
+ )
+
+ logger.info("Linear API connection successful")

  # Load team data and workflow states concurrently
+ logger.debug("Loading team data and workflow states...")
  team_id = await self._ensure_team_id()

  # Load workflow states and labels for the team
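Illustrative only: one way calling code might surface the troubleshooting message that initialize() now raises when test_connection() fails. The import path follows this package's layout and the config keys follow the __init__ docstring above; the API key value is a placeholder and the exact constructor behavior is not shown in this diff.

import asyncio

from mcp_ticketer.adapters.linear.adapter import LinearAdapter


async def main() -> None:
    adapter = LinearAdapter({"api_key": "lin_api_...", "team_key": "ENG"})
    try:
        await adapter.initialize()
    except ValueError as err:
        # The message now lists concrete checks (API key prefix, team_key,
        # permissions) instead of a single generic sentence.
        print(f"Linear setup failed:\n{err}")


asyncio.run(main())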
@@ -166,69 +224,330 @@ class LinearAdapter(BaseAdapter[Task]):
  await self._load_team_labels(team_id)

  self._initialized = True
+ logger.info("Linear adapter initialized successfully")

+ except ValueError:
+ # Re-raise ValueError with original message (for connection failures)
+ raise
  except Exception as e:
- raise ValueError(f"Failed to initialize Linear adapter: {e}") from e
+ logger.error(
+ f"Linear adapter initialization failed: {type(e).__name__}: {e}",
+ exc_info=True,
+ )
+ raise ValueError(
+ f"Failed to initialize Linear adapter: {type(e).__name__}: {e}\n"
+ "Check your credentials and network connection."
+ ) from e

  async def _ensure_team_id(self) -> str:
  """Ensure we have a team ID, resolving from team_key if needed.

+ Validates that team_id is a UUID. If it looks like a team_key,
+ resolves it to the actual UUID.
+
  Returns:
- Linear team UUID
+ -------
+ Valid Linear team UUID

  Raises:
- ValueError: If team cannot be found or resolved
+ ------
+ ValueError: If neither team_id nor team_key provided, or resolution fails

  """
+ logger = logging.getLogger(__name__)
+
+ # If we have a team_id, validate it's actually a UUID
  if self.team_id:
- return self.team_id
+ # Check if it looks like a UUID (36 chars with hyphens)
+ import re
+
+ uuid_pattern = re.compile(
+ r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
+ re.IGNORECASE,
+ )
+
+ if uuid_pattern.match(self.team_id):
+ # Already a valid UUID
+ return str(self.team_id)
+ # Looks like a team_key string - need to resolve it
+ logger.warning(
+ f"team_id '{self.team_id}' is not a UUID - treating as team_key and resolving"
+ )
+ teams = await self._get_team_by_key(self.team_id)
+ if teams and len(teams) > 0:
+ resolved_id = teams[0]["id"]
+ logger.info(
+ f"Resolved team_key '{self.team_id}' to UUID: {resolved_id}"
+ )
+ # Cache the resolved UUID
+ self.team_id = resolved_id
+ return resolved_id
+ raise ValueError(
+ f"Cannot resolve team_id '{self.team_id}' to a valid Linear team UUID. "
+ f"Please use team_key instead for team short codes like 'ENG'."
+ )

+ # No team_id, must have team_key
  if not self.team_key:
- raise ValueError("Either team_id or team_key must be provided")
+ raise ValueError(
+ "Either team_id (UUID) or team_key (short code) must be provided"
+ )

  # Query team by key
+ teams = await self._get_team_by_key(self.team_key)
+
+ if not teams or len(teams) == 0:
+ raise ValueError(f"Team with key '{self.team_key}' not found")
+
+ team = teams[0]
+ team_id = team["id"]
+
+ # Cache the resolved team_id
+ self.team_id = team_id
+ self._team_data = team
+ logger.info(f"Resolved team_key '{self.team_key}' to team_id: {team_id}")
+
+ return team_id
+
+ async def _get_team_by_key(self, team_key: str) -> list[dict[str, Any]]:
+ """Query Linear API to get team by key.
+
+ Args:
+ ----
+ team_key: Short team identifier (e.g., 'ENG', 'BTA')
+
+ Returns:
+ -------
+ List of matching teams
+
+ """
  query = """
  query GetTeamByKey($key: String!) {
  teams(filter: { key: { eq: $key } }) {
  nodes {
  id
- name
  key
- description
+ name
  }
  }
  }
  """

+ result = await self.client.execute_query(query, {"key": team_key})
+
+ if "teams" in result and "nodes" in result["teams"]:
+ return result["teams"]["nodes"]
+
+ return []
+
+ async def _get_custom_view(self, view_id: str) -> dict[str, Any] | None:
+ """Get a Linear custom view by ID to check if it exists.
+
+ Args:
+ ----
+ view_id: View identifier (slug-uuid format)
+
+ Returns:
+ -------
+ View dict with fields (id, name, description, issues) or None if not found
+
+ """
+ logging.debug(f"[VIEW DEBUG] _get_custom_view called with view_id: {view_id}")
+
+ if not view_id:
+ logging.debug("[VIEW DEBUG] view_id is empty, returning None")
+ return None
+
  try:
- result = await self.client.execute_query(query, {"key": self.team_key})
- teams = result.get("teams", {}).get("nodes", [])
+ logging.debug(
+ f"[VIEW DEBUG] Executing GET_CUSTOM_VIEW_QUERY for view_id: {view_id}"
+ )
+ result = await self.client.execute_query(
+ GET_CUSTOM_VIEW_QUERY, {"viewId": view_id, "first": 10}
+ )
+ logging.debug(f"[VIEW DEBUG] Query result: {result}")

- if not teams:
- raise ValueError(f"Team with key '{self.team_key}' not found")
+ if result.get("customView"):
+ logging.debug(
+ f"[VIEW DEBUG] customView found in result: {result.get('customView')}"
+ )
+ return result["customView"]
+
+ logging.debug(
+ f"[VIEW DEBUG] No customView in result. Checking pattern: has_hyphen={'-' in view_id}, length={len(view_id)}"
+ )

- team = teams[0]
- self.team_id = team["id"]
- self._team_data = team
+ # API query failed but check if this looks like a view identifier
+ # View IDs from URLs have format: slug-uuid (e.g., "mcp-skills-issues-0d0359fabcf9")
+ # If it has hyphens and is longer than 12 chars, it's likely a view URL identifier
+ if "-" in view_id and len(view_id) > 12:
+ logging.debug(
+ "[VIEW DEBUG] Pattern matched! Returning minimal view object"
+ )
+ # Return minimal view object to trigger helpful error message
+ # We can't fetch the actual name, so use generic "Linear View"
+ return {
+ "id": view_id,
+ "name": "Linear View",
+ "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+ }

- return self.team_id
+ logging.debug("[VIEW DEBUG] Pattern did not match, returning None")
+ return None

  except Exception as e:
- raise ValueError(f"Failed to resolve team '{self.team_key}': {e}") from e
+ logging.debug(
+ f"[VIEW DEBUG] Exception caught: {type(e).__name__}: {str(e)}"
+ )
+ # Linear returns error if view not found
+ # Check if this looks like a view identifier to provide helpful error
+ if "-" in view_id and len(view_id) > 12:
+ logging.debug(
+ "[VIEW DEBUG] Exception handler: Pattern matched! Returning minimal view object"
+ )
+ # Return minimal view object to trigger helpful error message
+ return {
+ "id": view_id,
+ "name": "Linear View",
+ "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+ }
+ logging.debug(
+ "[VIEW DEBUG] Exception handler: Pattern did not match, returning None"
+ )
+ return None
+
+ async def get_project(self, project_id: str) -> dict[str, Any] | None:
+ """Get a Linear project by ID using direct query.
+
+ This method uses Linear's direct project(id:) GraphQL query for efficient lookups.
+ Supports UUID, slugId, or short ID formats.
+
+ Args:
+ ----
+ project_id: Project UUID, slugId, or short ID
+
+ Returns:
+ -------
+ Project dict with fields (id, name, description, state, etc.) or None if not found
+
+ Examples:
+ --------
+ - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (UUID)
+ - "crm-smart-monitoring-system-f59a41a96c52" (slugId)
+ - "6cf55cfcfad4" (short ID - 12 hex chars)
+
+ """
+ if not project_id:
+ return None
+
+ # Direct query using Linear's project(id:) endpoint
+ query = """
+ query GetProject($id: String!) {
+ project(id: $id) {
+ id
+ name
+ description
+ state
+ slugId
+ createdAt
+ updatedAt
+ url
+ icon
+ color
+ targetDate
+ startedAt
+ completedAt
+ teams {
+ nodes {
+ id
+ name
+ key
+ description
+ }
+ }
+ }
+ }
+ """
+
+ try:
+ result = await self.client.execute_query(query, {"id": project_id})
+
+ if result.get("project"):
+ return result["project"]
+
+ # No match found
+ return None
+
+ except Exception:
+ # Linear returns error if project not found - return None instead of raising
+ return None
+
+ async def get_epic(self, epic_id: str, include_issues: bool = True) -> Epic | None:
+ """Get Linear project as Epic with optional issue loading.
+
+ This is the preferred method for reading projects/epics as it provides
+ explicit control over whether to load child issues.
+
+ Args:
+ ----
+ epic_id: Project UUID, slugId, or short ID
+ include_issues: Whether to fetch and populate child_issues (default True)
+
+ Returns:
+ -------
+ Epic object with child_issues populated if include_issues=True,
+ or None if project not found
+
+ Raises:
+ ------
+ ValueError: If credentials invalid
+
+ Example:
+ -------
+ # Get project with issues
+ epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895")
+
+ # Get project metadata only (faster)
+ epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895", include_issues=False)
+
+ """
+ # Validate credentials
+ is_valid, error_message = self.validate_credentials()
+ if not is_valid:
+ raise ValueError(error_message)
+
+ # Fetch project data
+ project_data = await self.get_project(epic_id)
+ if not project_data:
+ return None
+
+ # Map to Epic
+ epic = map_linear_project_to_epic(project_data)
+
+ # Optionally fetch and populate child issues
+ if include_issues:
+ issues = await self._get_project_issues(epic_id)
+ epic.child_issues = [issue.id for issue in issues if issue.id is not None]
+
+ return epic

  async def _resolve_project_id(self, project_identifier: str) -> str | None:
  """Resolve project identifier (slug, name, short ID, or URL) to full UUID.

  Args:
+ ----
  project_identifier: Project slug, name, short ID, or URL

  Returns:
+ -------
  Full Linear project UUID, or None if not found

  Raises:
+ ------
  ValueError: If project lookup fails

  Examples:
+ --------
  - "crm-smart-monitoring-system" (slug)
  - "CRM Smart Monitoring System" (name)
  - "f59a41a96c52" (short ID from URL)
@@ -238,55 +557,120 @@ class LinearAdapter(BaseAdapter[Task]):
  if not project_identifier:
  return None

- # Extract slug/ID from URL if full URL provided
- if project_identifier.startswith("http"):
- # Extract slug-shortid from URL like:
- # https://linear.app/travel-bta/project/crm-smart-monitoring-system-f59a41a96c52/overview
- parts = project_identifier.split("/project/")
- if len(parts) > 1:
- slug_with_id = parts[1].split("/")[
- 0
- ] # Get "crm-smart-monitoring-system-f59a41a96c52"
- project_identifier = slug_with_id
- else:
- raise ValueError(f"Invalid Linear project URL: {project_identifier}")
+ # Use tested URL parser to normalize the identifier
+ # This correctly extracts project IDs from URLs and handles:
+ # - Full URLs: https://linear.app/team/project/slug-id/overview
+ # - Slug-ID format: slug-id
+ # - Plain identifiers: id
+ try:
+ project_identifier = normalize_project_id(
+ project_identifier, adapter_type="linear"
+ )
+ except URLParserError as e:
+ logging.getLogger(__name__).warning(
+ f"Failed to parse project identifier: {e}"
+ )
+ # Continue with original identifier - may still work if it's a name

  # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
  # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
  if len(project_identifier) == 36 and project_identifier.count("-") == 4:
  return project_identifier

- # Query all projects and search for matching slug, name, or slugId
+ # OPTIMIZATION: Try direct query first if it looks like a UUID, slugId, or short ID
+ # This is more efficient than listing all projects
+ should_try_direct_query = False
+
+ # Check if it looks like a short ID (exactly 12 hex characters)
+ if len(project_identifier) == 12 and all(
+ c in "0123456789abcdefABCDEF" for c in project_identifier
+ ):
+ should_try_direct_query = True
+
+ # Check if it looks like a slugId format (contains dashes and ends with 12 hex chars)
+ if "-" in project_identifier:
+ parts = project_identifier.rsplit("-", 1)
+ if len(parts) > 1:
+ potential_short_id = parts[1]
+ if len(potential_short_id) == 12 and all(
+ c in "0123456789abcdefABCDEF" for c in potential_short_id
+ ):
+ should_try_direct_query = True
+
+ # Try direct query first if identifier format suggests it might work
+ if should_try_direct_query:
+ try:
+ project = await self.get_project(project_identifier)
+ if project:
+ return project["id"]
+ except Exception as e:
+ # Direct query failed - fall through to list-based search
+ logging.getLogger(__name__).debug(
+ f"Direct project query failed for '{project_identifier}': {e}. "
+ f"Falling back to listing all projects."
+ )
+
+ # FALLBACK: Query all projects with pagination support
+ # This is less efficient but handles name-based lookups and edge cases
  query = """
- query GetProjects {
- projects(first: 100) {
+ query GetProjects($first: Int!, $after: String) {
+ projects(first: $first, after: $after) {
  nodes {
  id
  name
  slugId
  }
+ pageInfo {
+ hasNextPage
+ endCursor
+ }
  }
  }
  """

  try:
- result = await self.client.execute_query(query, {})
- projects = result.get("projects", {}).get("nodes", [])
+ # Fetch all projects across multiple pages
+ all_projects = []
+ has_next_page = True
+ after_cursor = None
+
+ while has_next_page:
+ variables = {"first": 100}
+ if after_cursor:
+ variables["after"] = after_cursor
+
+ result = await self.client.execute_query(query, variables)
+ projects_data = result.get("projects", {})
+ page_projects = projects_data.get("nodes", [])
+ page_info = projects_data.get("pageInfo", {})
+
+ all_projects.extend(page_projects)
+ has_next_page = page_info.get("hasNextPage", False)
+ after_cursor = page_info.get("endCursor")

  # Search for match by slug, slugId, name (case-insensitive)
  project_lower = project_identifier.lower()
- for project in projects:
+ for project in all_projects:
  # Check if identifier matches slug pattern (extracted from slugId)
  slug_id = project.get("slugId", "")
  if slug_id:
  # slugId format: "crm-smart-monitoring-system-f59a41a96c52"
+ # Linear short IDs are always exactly 12 hexadecimal characters
  # Extract both the slug part and short ID
  if "-" in slug_id:
- parts = slug_id.rsplit(
- "-", 1
- ) # Split from right to get last part
- slug_part = parts[0] # "crm-smart-monitoring-system"
- short_id = parts[1] if len(parts) > 1 else "" # "f59a41a96c52"
+ parts = slug_id.rsplit("-", 1)
+ potential_short_id = parts[1] if len(parts) > 1 else ""
+
+ # Validate it's exactly 12 hex characters
+ if len(potential_short_id) == 12 and all(
+ c in "0123456789abcdefABCDEF" for c in potential_short_id
+ ):
+ slug_part = parts[0]
+ short_id = potential_short_id
+ else:
+ # Fallback: treat entire slugId as slug if last part isn't valid
+ slug_part = slug_id
+ short_id = ""

  # Match full slugId, slug part, or short ID
  if (
@@ -308,19 +692,157 @@ class LinearAdapter(BaseAdapter[Task]):
  f"Failed to resolve project '{project_identifier}': {e}"
  ) from e

+ async def _validate_project_team_association(
+ self, project_id: str, team_id: str
+ ) -> tuple[bool, list[str]]:
+ """Check if team is associated with project.
+
+ Args:
+ ----
+ project_id: Linear project UUID
+ team_id: Linear team UUID
+
+ Returns:
+ -------
+ Tuple of (is_associated, list_of_project_team_ids)
+
+ """
+ project = await self.get_project(project_id)
+ if not project:
+ return False, []
+
+ # Extract team IDs from project's teams
+ project_team_ids = [
+ team["id"] for team in project.get("teams", {}).get("nodes", [])
+ ]
+
+ return team_id in project_team_ids, project_team_ids
+
+ async def _ensure_team_in_project(self, project_id: str, team_id: str) -> bool:
+ """Add team to project if not already associated.
+
+ Args:
+ ----
+ project_id: Linear project UUID
+ team_id: Linear team UUID to add
+
+ Returns:
+ -------
+ True if successful, False otherwise
+
+ """
+ # First check current association
+ is_associated, existing_team_ids = (
+ await self._validate_project_team_association(project_id, team_id)
+ )
+
+ if is_associated:
+ return True # Already associated, nothing to do
+
+ # Add team to project by updating project's teamIds
+ update_query = """
+ mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+ projectUpdate(id: $id, input: $input) {
+ success
+ project {
+ id
+ teams {
+ nodes {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ """
+
+ # Include existing teams + new team
+ all_team_ids = existing_team_ids + [team_id]
+
+ try:
+ result = await self.client.execute_mutation(
+ update_query, {"id": project_id, "input": {"teamIds": all_team_ids}}
+ )
+ success = result.get("projectUpdate", {}).get("success", False)
+
+ if success:
+ logging.getLogger(__name__).info(
+ f"Successfully added team {team_id} to project {project_id}"
+ )
+ else:
+ logging.getLogger(__name__).warning(
+ f"Failed to add team {team_id} to project {project_id}"
+ )
+
+ return success
+ except Exception as e:
+ logging.getLogger(__name__).error(
+ f"Error adding team {team_id} to project {project_id}: {e}"
+ )
+ return False
+
+ async def _get_project_issues(
+ self, project_id: str, limit: int = 100
+ ) -> list[Task]:
+ """Fetch all issues belonging to a Linear project.
+
+ Uses existing build_issue_filter() and LIST_ISSUES_QUERY infrastructure
+ to fetch issues filtered by project_id.
+
+ Args:
+ ----
+ project_id: Project UUID, slugId, or short ID
+ limit: Maximum issues to return (default 100, max 250)
+
+ Returns:
+ -------
+ List of Task objects representing project's issues
+
+ Raises:
+ ------
+ ValueError: If credentials invalid or query fails
+
+ """
+ logger = logging.getLogger(__name__)
+
+ # Build filter for issues belonging to this project
+ issue_filter = build_issue_filter(project_id=project_id)
+
+ variables = {
+ "filter": issue_filter,
+ "first": min(limit, 250), # Linear API max per page
+ }
+
+ try:
+ result = await self.client.execute_query(LIST_ISSUES_QUERY, variables)
+ issues = result.get("issues", {}).get("nodes", [])
+
+ # Map Linear issues to Task objects
+ return [map_linear_issue_to_task(issue) for issue in issues]
+
+ except Exception as e:
+ # Log but don't fail - return empty list if issues can't be fetched
+ logger.warning(f"Failed to fetch project issues for {project_id}: {e}")
+ return []
+
  async def _resolve_issue_id(self, issue_identifier: str) -> str | None:
  """Resolve issue identifier (like "ENG-842") to full UUID.

  Args:
+ ----
  issue_identifier: Issue identifier (e.g., "ENG-842") or UUID

  Returns:
+ -------
  Full Linear issue UUID, or None if not found

  Raises:
+ ------
  ValueError: If issue lookup fails

  Examples:
+ --------
  - "ENG-842" (issue identifier)
  - "BTA-123" (issue identifier)
  - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (already a UUID)
@@ -363,6 +885,7 @@ class LinearAdapter(BaseAdapter[Task]):
  """Load and cache workflow states for the team.

  Args:
+ ----
  team_id: Linear team ID

  """
@@ -388,6 +911,7 @@ class LinearAdapter(BaseAdapter[Task]):
  """Load and cache labels for the team with retry logic.

  Args:
+ ----
  team_id: Linear team ID

  """
@@ -432,69 +956,342 @@ class LinearAdapter(BaseAdapter[Task]):
  )
  self._labels_cache = [] # Explicitly empty on failure

- async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
- """Resolve label names to Linear label IDs with proper None vs empty list handling.
+ async def _find_label_by_name(
+ self, name: str, team_id: str, max_retries: int = 3
+ ) -> dict | None:
+ """Find a label by name using Linear API (server-side check) with retry logic.
+
+ Handles cache staleness by checking Linear's server-side state.
+ This method is used when cache lookup misses to prevent duplicate
+ label creation attempts.
+
+ Implements retry logic with exponential backoff to handle transient
+ network failures and distinguish between "label not found" (None) and
+ "check failed" (exception).

  Args:
- label_names: List of label names
+ ----
+ name: Label name to search for (case-insensitive)
+ team_id: Linear team ID
+ max_retries: Maximum retry attempts for transient failures (default: 3)

  Returns:
- List of Linear label IDs that exist
+ -------
+ dict: Label data if found (with id, name, color, description)
+ None: Label definitively doesn't exist (checked successfully)
+
+ Raises:
+ ------
+ Exception: Unable to check label existence after retries exhausted
+ (network/API failure). Caller must handle to prevent
+ duplicate label creation.
+
+ Note:
+ ----
+ This method queries Linear's API and returns the first 250 labels.
+ For teams with >250 labels, pagination would be needed (future enhancement).
+
+ Related:
+ -------
+ 1M-443: Fix duplicate label error when setting existing labels
+ 1M-443 hotfix: Add retry logic to prevent ambiguous error handling

  """
  logger = logging.getLogger(__name__)

- # None = not loaded yet, [] = loaded but empty or failed
- if self._labels_cache is None:
- team_id = await self._ensure_team_id()
- await self._load_team_labels(team_id)
+ query = """
+ query GetTeamLabels($teamId: String!) {
+ team(id: $teamId) {
+ labels(first: 250) {
+ nodes {
+ id
+ name
+ color
+ description
+ }
+ }
+ }
+ }
+ """

- if self._labels_cache is None:
- # Still None after load attempt - should not happen
- logger.error(
- "Label cache is None after load attempt. Tags will be skipped."
- )
- return []
+ for attempt in range(max_retries):
+ try:
+ result = await self.client.execute_query(query, {"teamId": team_id})
+ labels = result.get("team", {}).get("labels", {}).get("nodes", [])

- if not self._labels_cache:
- # Empty list - either no labels in team or load failed
- logger.warning(
- f"Team has no labels available. Cannot resolve tags: {label_names}"
- )
- return []
+ # Case-insensitive search
+ name_lower = name.lower()
+ for label in labels:
+ if label["name"].lower() == name_lower:
+ logger.debug(
+ f"Found label '{name}' via server-side search (ID: {label['id']})"
+ )
+ return label
+
+ # Label definitively doesn't exist (successful check)
+ logger.debug(f"Label '{name}' not found in {len(labels)} team labels")
+ return None

- # Create name -> ID mapping (case-insensitive)
- label_map = {label["name"].lower(): label["id"] for label in self._labels_cache}
+ except Exception as e:
+ if attempt < max_retries - 1:
+ # Transient failure, retry with exponential backoff
+ wait_time = 2**attempt
+ await asyncio.sleep(wait_time)
+ logger.debug(
+ f"Retry {attempt + 1}/{max_retries} for label '{name}' search: {e}"
+ )
+ continue
+ else:
+ # All retries exhausted, propagate exception
+ # CRITICAL: Caller must handle to prevent duplicate creation
+ logger.error(
+ f"Failed to check label '{name}' after {max_retries} attempts: {e}"
+ )
+ raise
+
+ # This should never be reached (all paths return/raise in loop)
+ return None
+
+ async def _create_label(
+ self, name: str, team_id: str, color: str = "#0366d6"
+ ) -> str:
+ """Create a new label in Linear.
+
+ Implements race condition recovery: if creation fails due to duplicate,
+ retry lookup from server (Tier 2) to get the existing label ID.
+
+ Related: 1M-398 - Label duplicate error handling
+
+ Args:
+ ----
+ name: Label name
+ team_id: Linear team ID
+ color: Label color (hex format, default: blue)
+
+ Returns:
+ -------
+ str: Label ID (either newly created or existing after recovery)
+
+ Raises:
+ ------
+ ValueError: If label creation fails and recovery lookup also fails
+
+ """
+ logger = logging.getLogger(__name__)
+
+ label_input = {
+ "name": name,
+ "teamId": team_id,
+ "color": color,
+ }
+
+ try:
+ result = await self.client.execute_mutation(
+ CREATE_LABEL_MUTATION, {"input": label_input}
+ )
+
+ if not result["issueLabelCreate"]["success"]:
+ raise ValueError(f"Failed to create label '{name}'")
+
+ created_label = result["issueLabelCreate"]["issueLabel"]
+ label_id = created_label["id"]
+
+ # Update cache with new label
+ if self._labels_cache is not None:
+ self._labels_cache.append(created_label)
+
+ logger.info(f"Created new label '{name}' with ID: {label_id}")
+ return label_id
+
+ except Exception as e:
+ """
+ Race condition recovery: Another process may have created this label
+ between our Tier 2 lookup and creation attempt.
+
+ Graceful recovery:
+ 1. Check if error is duplicate label error
+ 2. Retry Tier 2 lookup (query server)
+ 3. Return existing label ID if found
+ 4. Raise error if recovery fails
+ """
+ error_str = str(e).lower()
+
+ # Check if this is a duplicate label error
+ if "duplicate" in error_str and "label" in error_str:
+ logger.debug(
+ f"Duplicate label detected for '{name}', attempting recovery lookup"
+ )
+
+ # Retry Tier 2: Query server for existing label
+ server_label = await self._find_label_by_name(name, team_id)
+
+ if server_label:
+ label_id = server_label["id"]
+
+ # Update cache with recovered label
+ if self._labels_cache is not None:
+ self._labels_cache.append(server_label)
+
+ logger.info(
+ f"Successfully recovered from duplicate label error: '{name}' "
+ f"(ID: {label_id})"
+ )
+ return label_id
+
+ # Recovery failed - label exists but we can't retrieve it
+ raise ValueError(
+ f"Label '{name}' already exists but could not retrieve ID. "
+ f"This may indicate a permissions issue or API inconsistency."
+ ) from e
+
+ # Not a duplicate error - re-raise original exception
+ logger.error(f"Failed to create label '{name}': {e}")
+ raise ValueError(f"Failed to create label '{name}': {e}") from e
+
+ async def _ensure_labels_exist(self, label_names: list[str]) -> list[str]:
+ """Ensure labels exist, creating them if necessary.
+
+ This method implements a three-tier label resolution flow to prevent
+ duplicate label creation errors:
+
+ 1. **Tier 1 (Cache)**: Check local cache (fast, 0 API calls)
+ 2. **Tier 2 (Server)**: Query Linear API for label (handles staleness, +1 API call)
+ 3. **Tier 3 (Create)**: Create new label only if truly doesn't exist
+
+ The three-tier approach solves cache staleness issues where labels exist
+ in Linear but not in local cache, preventing "label already exists" errors.
+
+ Behavior (1M-396):
+ - Fail-fast: If any label creation fails, the exception is propagated
+ - All-or-nothing: Partial label updates are not allowed
+ - Clear errors: Callers receive actionable error messages
+
+ Performance:
+ - Cached labels: 0 additional API calls (Tier 1 hit)
+ - New labels: +1 API call for existence check (Tier 2) + 1 for creation (Tier 3)
+ - Trade-off: Accepts +1 API call to prevent duplicate errors
+
+ Args:
+ ----
+ label_names: List of label names (strings)
+
+ Returns:
+ -------
+ List of Linear label IDs (UUIDs)
+
+ Raises:
+ ------
+ ValueError: If any label creation fails
+
+ Related:
+ -------
+ 1M-443: Fix duplicate label error when setting existing labels
+ 1M-396: Fail-fast label creation behavior
+
+ """
+ logger = logging.getLogger(__name__)
+
+ if not label_names:
+ return []
+
+ # Ensure labels are loaded
+ if self._labels_cache is None:
+ team_id = await self._ensure_team_id()
+ await self._load_team_labels(team_id)
+
+ if self._labels_cache is None:
+ logger.error(
+ "Label cache is None after load attempt. Tags will be skipped."
+ )
+ return []
+
+ # Get team ID for creating new labels
+ team_id = await self._ensure_team_id()
+
+ # Create name -> ID mapping (case-insensitive)
+ label_map = {
+ label["name"].lower(): label["id"] for label in (self._labels_cache or [])
+ }

  logger.debug(f"Available labels in team: {list(label_map.keys())}")

- # Resolve label names to IDs
+ # Map or create each label
  label_ids = []
- unmatched_labels = []
-
  for name in label_names:
- label_id = label_map.get(name.lower())
- if label_id:
+ name_lower = name.lower()
+
+ # Tier 1: Check cache (fast path, 0 API calls)
+ if name_lower in label_map:
+ label_id = label_map[name_lower]
  label_ids.append(label_id)
- logger.debug(f"Resolved label '{name}' to ID: {label_id}")
- else:
- unmatched_labels.append(name)
- logger.warning(
- f"Label '{name}' not found in team. Available labels: {list(label_map.keys())}"
+ logger.debug(
+ f"[Tier 1] Resolved cached label '{name}' to ID: {label_id}"
  )
-
- if unmatched_labels:
- logger.warning(
- f"Could not resolve labels: {unmatched_labels}. "
- f"Create them in Linear first or check spelling."
- )
+ else:
+ # Tier 2: Check server for label (handles cache staleness)
+ try:
+ server_label = await self._find_label_by_name(name, team_id)
+ except Exception as e:
+ # Server check failed after retries (1M-443 hotfix)
+ # CRITICAL: Do NOT proceed to creation to prevent duplicates
+ # Re-raise to signal failure to verify label existence
+ logger.error(
+ f"Unable to verify label '{name}' existence. "
+ f"Cannot safely create to avoid duplicates. Error: {e}"
+ )
+ raise ValueError(
+ f"Unable to verify label '{name}' existence. "
+ f"Cannot safely create to avoid duplicates. Error: {e}"
+ ) from e
+
+ if server_label:
+ # Label exists on server but not in cache - update cache
+ label_id = server_label["id"]
+ label_ids.append(label_id)
+ label_map[name_lower] = label_id
+
+ # Update cache to prevent future misses
+ if self._labels_cache is not None:
+ self._labels_cache.append(server_label)
+
+ logger.info(
+ f"[Tier 2] Found stale label '{name}' on server (ID: {label_id}), "
+ "updated cache"
+ )
+ else:
+ # Tier 3: Label truly doesn't exist - create it
+ # Propagate exceptions for fail-fast behavior (1M-396)
+ new_label_id = await self._create_label(name, team_id)
+ label_ids.append(new_label_id)
+ # Update local map for subsequent labels in same call
+ label_map[name_lower] = new_label_id
+ logger.info(
+ f"[Tier 3] Created new label '{name}' with ID: {new_label_id}"
+ )

  return label_ids

+ async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
+ """Resolve label names to Linear label IDs, creating labels if needed.
+
+ This method wraps _ensure_labels_exist for backward compatibility.
+
+ Args:
+ ----
+ label_names: List of label names
+
+ Returns:
+ -------
+ List of Linear label IDs
+
+ """
+ return await self._ensure_labels_exist(label_names)
+
  def _get_state_mapping(self) -> dict[TicketState, str]:
  """Get mapping from universal states to Linear workflow state IDs.

  Returns:
+ -------
  Dictionary mapping TicketState to Linear state ID

  """
@@ -526,9 +1323,11 @@ class LinearAdapter(BaseAdapter[Task]):
  """Get Linear user ID from email, display name, or user ID.

  Args:
+ ----
  user_identifier: Email, display name, or user ID

  Returns:
+ -------
  Linear user ID or None if not found

  """
@@ -572,12 +1371,15 @@ class LinearAdapter(BaseAdapter[Task]):
  """Create a new Linear issue or project with full field support.

  Args:
+ ----
  ticket: Epic or Task to create

  Returns:
+ -------
  Created ticket with populated ID and metadata

  Raises:
+ ------
  ValueError: If credentials are invalid or creation fails

  """
@@ -606,12 +1408,15 @@ class LinearAdapter(BaseAdapter[Task]):
  - Sub-issue: Child work item (has parent issue)

  Args:
+ ----
  task: Task to create

  Returns:
+ -------
  Created task with Linear metadata

  """
+ logger = logging.getLogger(__name__)
  team_id = await self._ensure_team_id()

  # Build issue input using mapper
@@ -625,8 +1430,14 @@ class LinearAdapter(BaseAdapter[Task]):
  issue_input["stateId"] = state_mapping[TicketState.OPEN]

  # Resolve assignee to user ID if provided
- if task.assignee:
- user_id = await self._get_user_id(task.assignee)
+ # Use configured default user if no assignee specified
+ assignee = task.assignee
+ if not assignee and self.user_email:
+ assignee = self.user_email
+ logger.debug(f"Using default assignee from config: {assignee}")
+
+ if assignee:
+ user_id = await self._get_user_id(assignee)
  if user_id:
  issue_input["assigneeId"] = user_id

@@ -643,7 +1454,35 @@ class LinearAdapter(BaseAdapter[Task]):
  if task.parent_epic:
  project_id = await self._resolve_project_id(task.parent_epic)
  if project_id:
- issue_input["projectId"] = project_id
+ # Validate team-project association before assigning
+ is_valid, _ = await self._validate_project_team_association(
+ project_id, team_id
+ )
+
+ if not is_valid:
+ # Attempt to add team to project automatically
+ logging.getLogger(__name__).info(
+ f"Team {team_id} not associated with project {project_id}. "
+ f"Attempting to add team to project..."
+ )
+ success = await self._ensure_team_in_project(project_id, team_id)
+
+ if success:
+ issue_input["projectId"] = project_id
+ logging.getLogger(__name__).info(
+ "Successfully associated team with project. "
+ "Issue will be assigned to project."
+ )
+ else:
+ logging.getLogger(__name__).warning(
+ "Could not associate team with project. "
+ "Issue will be created without project assignment. "
+ "Manual assignment required."
+ )
+ issue_input.pop("projectId", None)
+ else:
+ # Team already associated - safe to assign
+ issue_input["projectId"] = project_id
  else:
  # Log warning but don't fail - user may have provided invalid project
  logging.getLogger(__name__).warning(
@@ -668,6 +1507,28 @@ class LinearAdapter(BaseAdapter[Task]):
  # Remove parentId if we couldn't resolve it
  issue_input.pop("parentId", None)

+ # Validate labelIds are proper UUIDs before sending to Linear API
+ # Bug Fix (v1.1.1): This validation prevents "Argument Validation Error"
+ # by ensuring labelIds contains UUIDs (e.g., "uuid-1"), not names (e.g., "bug").
+ # Linear's GraphQL API requires labelIds to be [String!]! (non-null array of
+ # non-null UUID strings). If tag names leak through, we detect and remove them
+ # here to prevent API errors.
+ #
+ # See: docs/TROUBLESHOOTING.md#issue-argument-validation-error-when-creating-issues-with-labels
+ if "labelIds" in issue_input:
+ invalid_labels = []
+ for label_id in issue_input["labelIds"]:
+ # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+ if not isinstance(label_id, str) or len(label_id) != 36:
+ invalid_labels.append(label_id)
+
+ if invalid_labels:
+ logging.getLogger(__name__).error(
+ f"Invalid label ID format detected: {invalid_labels}. "
+ f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+ )
+ issue_input.pop("labelIds")
+
  try:
  result = await self.client.execute_mutation(
  CREATE_ISSUE_MUTATION, {"input": issue_input}
@@ -688,9 +1549,11 @@ class LinearAdapter(BaseAdapter[Task]):
  """Create a Linear project from an Epic.

  Args:
+ ----
  epic: Epic to create

  Returns:
+ -------
  Created epic with Linear metadata

  """
@@ -753,6 +1616,7 @@ class LinearAdapter(BaseAdapter[Task]):
  """Update a Linear project (Epic) with specified fields.

  Args:
+ ----
  epic_id: Linear project UUID or slug-shortid
  updates: Dictionary of fields to update. Supported fields:
  - title: Project name
@@ -763,9 +1627,11 @@ class LinearAdapter(BaseAdapter[Task]):
  - icon: Project icon

  Returns:
+ -------
  Updated Epic object or None if not found

  Raises:
+ ------
  ValueError: If update fails or project not found

  """
@@ -779,13 +1645,29 @@ class LinearAdapter(BaseAdapter[Task]):
  if not project_uuid:
  raise ValueError(f"Project '{epic_id}' not found")

+ # Validate field lengths before building update input
+ from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
  # Build update input from updates dict
  update_input = {}

  if "title" in updates:
- update_input["name"] = updates["title"]
+ try:
+ validated_title = FieldValidator.validate_field(
+ "linear", "epic_name", updates["title"], truncate=False
+ )
+ update_input["name"] = validated_title
+ except ValidationError as e:
+ raise ValueError(str(e)) from e
+
  if "description" in updates:
- update_input["description"] = updates["description"]
+ try:
+ validated_description = FieldValidator.validate_field(
+ "linear", "epic_description", updates["description"], truncate=False
+ )
+ update_input["description"] = validated_description
+ except ValidationError as e:
+ raise ValueError(str(e)) from e
  if "state" in updates:
  update_input["state"] = updates["state"]
  if "target_date" in updates:
@@ -840,14 +1722,22 @@ class LinearAdapter(BaseAdapter[Task]):
  except Exception as e:
  raise ValueError(f"Failed to update Linear project: {e}") from e

- async def read(self, ticket_id: str) -> Task | None:
- """Read a Linear issue by identifier with full details.
+ async def read(self, ticket_id: str) -> Task | Epic | None:
+ """Read a Linear issue OR project by identifier with full details.

  Args:
- ticket_id: Linear issue identifier (e.g., 'BTA-123')
+ ----
+ ticket_id: Linear issue identifier (e.g., 'BTA-123') or project UUID

  Returns:
- Task with full details or None if not found
+ -------
+ Task with full details if issue found,
+ Epic with full details if project found,
+ None if not found
+
+ Raises:
+ ------
+ ValueError: If ticket_id is a view URL (views are not supported in ticket_read)

  """
  # Validate credentials before attempting operation
@@ -855,6 +1745,7 @@ class LinearAdapter(BaseAdapter[Task]):
  if not is_valid:
  raise ValueError(error_message)

+ # Try reading as an issue first (most common case)
  query = (
  ALL_FRAGMENTS
  + """
@@ -872,20 +1763,88 @@ class LinearAdapter(BaseAdapter[Task]):
  if result.get("issue"):
  return map_linear_issue_to_task(result["issue"])

- except TransportQueryError:
- # Issue not found
+ except Exception:
+ # Not found as issue, continue to project/view check
+ pass
+
+ # If not found as issue, try reading as project
+ try:
+ project_data = await self.get_project(ticket_id)
+ if project_data:
+ # Fetch project's issues to populate child_issues field
+ issues = await self._get_project_issues(ticket_id)
+
+ # Map to Epic
+ epic = map_linear_project_to_epic(project_data)
+
+ # Populate child_issues with issue IDs
+ epic.child_issues = [issue.id for issue in issues]
+
+ return epic
+ except Exception:
+ # Not found as project either
+ pass
+
+ # If not found as issue or project, check if it's a view URL
+ # Views are collections of issues, not individual tickets
+ logging.debug(
+ f"[VIEW DEBUG] read() checking if ticket_id is a view: {ticket_id}"
+ )
+ try:
+ view_data = await self._get_custom_view(ticket_id)
+ logging.debug(f"[VIEW DEBUG] read() _get_custom_view returned: {view_data}")
+
+ if view_data:
+ logging.debug(
+ "[VIEW DEBUG] read() view_data is truthy, preparing to raise ValueError"
+ )
+ # View found - raise informative error
+ view_name = view_data.get("name", "Unknown")
+ issues_data = view_data.get("issues", {})
+ issue_count = len(issues_data.get("nodes", []))
+ has_more = issues_data.get("pageInfo", {}).get("hasNextPage", False)
+ count_str = f"{issue_count}+" if has_more else str(issue_count)
+
+ logging.debug(
+ f"[VIEW DEBUG] read() raising ValueError with view_name={view_name}, count={count_str}"
+ )
+ raise ValueError(
+ f"Linear view URLs are not supported in ticket_read.\n"
+ f"\n"
+ f"View: '{view_name}' ({ticket_id})\n"
+ f"This view contains {count_str} issues.\n"
+ f"\n"
+ f"Use ticket_list or ticket_search to query issues instead."
+ )
+ else:
+ logging.debug("[VIEW DEBUG] read() view_data is falsy (None or empty)")
+ except ValueError:
+ # Re-raise ValueError (our informative error message)
+ logging.debug("[VIEW DEBUG] read() re-raising ValueError")
+ raise
+ except Exception as e:
+ # View query failed - not a view
+ logging.debug(
+ f"[VIEW DEBUG] read() caught exception in view check: {type(e).__name__}: {str(e)}"
+ )
  pass

+ # Not found as either issue, project, or view
+ logging.debug(
+ "[VIEW DEBUG] read() returning None - not found as issue, project, or view"
+ )
  return None

  async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
  """Update a Linear issue with comprehensive field support.

  Args:
+ ----
  ticket_id: Linear issue identifier
  updates: Dictionary of fields to update

  Returns:
+ -------
  Updated task or None if not found

  """
@@ -894,6 +1853,9 @@ class LinearAdapter(BaseAdapter[Task]):
  if not is_valid:
  raise ValueError(error_message)

+ # Ensure adapter is initialized (loads workflow states for state transitions)
+ await self.initialize()
+
  # First get the Linear internal ID
  id_query = """
  query GetIssueId($identifier: String!) {
@@ -936,9 +1898,18 @@ class LinearAdapter(BaseAdapter[Task]):
  # Resolve label names to IDs if provided
  if "tags" in updates:
  if updates["tags"]: # Non-empty list
- label_ids = await self._resolve_label_ids(updates["tags"])
- if label_ids:
- update_input["labelIds"] = label_ids
+ try:
+ label_ids = await self._resolve_label_ids(updates["tags"])
+ if label_ids:
+ update_input["labelIds"] = label_ids
+ except ValueError as e:
+ # Label creation failed - provide clear error message (1M-396)
+ raise ValueError(
+ f"Failed to update labels for issue {ticket_id}. "
+ f"Label creation error: {e}. "
+ f"Tip: Use the 'label_list' tool to check existing labels, "
+ f"or verify you have permissions to create new labels."
+ ) from e
  else: # Empty list = remove all labels
  update_input["labelIds"] = []

@@ -952,6 +1923,21 @@ class LinearAdapter(BaseAdapter[Task]):
952
1923
  f"Could not resolve project identifier '{updates['parent_epic']}'"
953
1924
  )
954
1925
 
1926
+ # Validate labelIds are proper UUIDs before sending to Linear API
1927
+ if "labelIds" in update_input and update_input["labelIds"]:
1928
+ invalid_labels = []
1929
+ for label_id in update_input["labelIds"]:
1930
+ # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
1931
+ if not isinstance(label_id, str) or len(label_id) != 36:
1932
+ invalid_labels.append(label_id)
1933
+
1934
+ if invalid_labels:
1935
+ logging.getLogger(__name__).error(
1936
+ f"Invalid label ID format detected in update: {invalid_labels}. "
1937
+ f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
1938
+ )
1939
+ update_input.pop("labelIds")
1940
+
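The 36-character length check above is a cheap guard against label names slipping into labelIds. For reference, a stricter variant (a sketch only, not what this adapter ships) could round-trip the value through uuid.UUID:

# Sketch: strict validation that a label ID is a canonical hyphenated UUID.
import uuid

def is_canonical_uuid(value: object) -> bool:
    if not isinstance(value, str) or len(value) != 36:
        return False
    try:
        # uuid.UUID accepts several spellings; comparing the normalized form
        # back to the input rejects anything that is not already canonical.
        return str(uuid.UUID(value)) == value.lower()
    except ValueError:
        return False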
955
1941
  # Execute update
956
1942
  result = await self.client.execute_mutation(
957
1943
  UPDATE_ISSUE_MUTATION, {"id": linear_id, "input": update_input}
@@ -970,9 +1956,11 @@ class LinearAdapter(BaseAdapter[Task]):
970
1956
  """Delete a Linear issue (archive it).
971
1957
 
972
1958
  Args:
1959
+ ----
973
1960
  ticket_id: Linear issue identifier
974
1961
 
975
1962
  Returns:
1963
+ -------
976
1964
  True if successfully deleted/archived
977
1965
 
978
1966
  """
@@ -989,11 +1977,13 @@ class LinearAdapter(BaseAdapter[Task]):
989
1977
  """List Linear issues with optional filtering.
990
1978
 
991
1979
  Args:
1980
+ ----
992
1981
  limit: Maximum number of issues to return
993
1982
  offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
994
1983
  filters: Optional filters (state, assignee, priority, etc.)
995
1984
 
996
1985
  Returns:
1986
+ -------
997
1987
  List of tasks matching the criteria
998
1988
 
999
1989
  """
@@ -1022,6 +2012,12 @@ class LinearAdapter(BaseAdapter[Task]):
1022
2012
  if user_id:
1023
2013
  issue_filter["assignee"] = {"id": {"eq": user_id}}
1024
2014
 
2015
+ # Support parent_issue filter for listing children (critical for parent state constraints)
2016
+ if "parent_issue" in filters:
2017
+ parent_id = await self._resolve_issue_id(filters["parent_issue"])
2018
+ if parent_id:
2019
+ issue_filter["parent"] = {"id": {"eq": parent_id}}
2020
+
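A usage sketch of the new parent_issue filter, assuming an initialized adapter; "ENG-842" is the same illustrative identifier used elsewhere in these docstrings:

# Sketch: listing the direct children of a parent issue.
async def list_children(adapter):
    children = await adapter.list(limit=50, offset=0, filters={"parent_issue": "ENG-842"})
    return [child.id for child in children]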
1025
2021
  if "created_after" in filters:
1026
2022
  issue_filter["createdAt"] = {"gte": filters["created_after"]}
1027
2023
  if "updated_after" in filters:
@@ -1047,9 +2043,11 @@ class LinearAdapter(BaseAdapter[Task]):
1047
2043
  """Search Linear issues using comprehensive filters.
1048
2044
 
1049
2045
  Args:
2046
+ ----
1050
2047
  query: Search query with filters and criteria
1051
2048
 
1052
2049
  Returns:
2050
+ -------
1053
2051
  List of tasks matching the search criteria
1054
2052
 
1055
2053
  """
@@ -1071,9 +2069,15 @@ class LinearAdapter(BaseAdapter[Task]):
1071
2069
  issue_filter["title"] = {"containsIgnoreCase": query.query}
1072
2070
 
1073
2071
  # State filter
2072
+ # Bug fix: Handle OPEN state specially to include both unstarted AND backlog
2073
+ # tickets, as both Linear states map to TicketState.OPEN
1074
2074
  if query.state:
1075
- state_type = get_linear_state_type(query.state)
1076
- issue_filter["state"] = {"type": {"eq": state_type}}
2075
+ if query.state == TicketState.OPEN:
2076
+ # Include both "unstarted" and "backlog" states for OPEN
2077
+ issue_filter["state"] = {"type": {"in": ["unstarted", "backlog"]}}
2078
+ else:
2079
+ state_type = get_linear_state_type(query.state)
2080
+ issue_filter["state"] = {"type": {"eq": state_type}}
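For reference, the two branches above produce differently shaped GraphQL filters; an illustrative comparison (the "started" type is an assumption about how an in-progress state maps, mirroring the state types listed later in list_issue_statuses):

# OPEN maps to two Linear state types, so the filter uses "in":
open_state_filter = {"state": {"type": {"in": ["unstarted", "backlog"]}}}
# Every other TicketState keeps the single-type "eq" filter (assuming a state that maps to "started"):
other_state_filter = {"state": {"type": {"eq": "started"}}}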
1077
2081
 
1078
2082
  # Priority filter
1079
2083
  if query.priority:
@@ -1086,6 +2090,13 @@ class LinearAdapter(BaseAdapter[Task]):
1086
2090
  if user_id:
1087
2091
  issue_filter["assignee"] = {"id": {"eq": user_id}}
1088
2092
 
2093
+ # Project filter (Bug fix: Add support for filtering by project/epic)
2094
+ if query.project:
2095
+ # Resolve project ID (supports ID, name, or URL)
2096
+ project_id = await self._resolve_project_id(query.project)
2097
+ if project_id:
2098
+ issue_filter["project"] = {"id": {"eq": project_id}}
2099
+
1089
2100
  # Tags filter (labels in Linear)
1090
2101
  if query.tags:
1091
2102
  issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}
@@ -1113,10 +2124,12 @@ class LinearAdapter(BaseAdapter[Task]):
1113
2124
  """Transition Linear issue to new state with workflow validation.
1114
2125
 
1115
2126
  Args:
2127
+ ----
1116
2128
  ticket_id: Linear issue identifier
1117
2129
  target_state: Target state to transition to
1118
2130
 
1119
2131
  Returns:
2132
+ -------
1120
2133
  Updated task or None if transition failed
1121
2134
 
1122
2135
  """
@@ -1132,25 +2145,36 @@ class LinearAdapter(BaseAdapter[Task]):
1132
2145
  ) -> bool:
1133
2146
  """Validate if state transition is allowed.
1134
2147
 
2148
+ Delegates to BaseAdapter for:
2149
+ - Workflow state machine validation
2150
+ - Parent/child state constraint validation (from 1M-93 requirement)
2151
+
2152
+ The BaseAdapter implementation (core/adapter.py lines 312-370) ensures:
2153
+ 1. Valid workflow state transitions (OPEN → IN_PROGRESS → READY → etc.)
2154
+ 2. Parent issues maintain completion level ≥ max child completion level
2155
+
1135
2156
  Args:
2157
+ ----
1136
2158
  ticket_id: Linear issue identifier
1137
2159
  target_state: Target state to validate
1138
2160
 
1139
2161
  Returns:
1140
- True if transition is valid
2162
+ -------
2163
+ True if transition is valid, False otherwise
1141
2164
 
1142
2165
  """
1143
- # For now, allow all transitions
1144
- # In practice, you might want to implement Linear's workflow rules
1145
- return True
2166
+ # Call parent implementation for all validation logic
2167
+ return await super().validate_transition(ticket_id, target_state)
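A small sketch of how a caller might use the delegated validation before attempting a state change; the adapter's actual transition method sits outside this hunk, so it is only referenced in a comment:

# Sketch: pre-checking a transition against workflow and parent/child constraints.
async def guard_transition(adapter, ticket_id: str, target_state):
    if not await adapter.validate_transition(ticket_id, target_state):
        raise ValueError(
            f"{ticket_id}: moving to {target_state} violates the workflow "
            f"or a parent/child completion constraint"
        )
    # ...call the adapter's transition method here (defined earlier in this class)...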
1146
2168
 
1147
2169
  async def add_comment(self, comment: Comment) -> Comment:
1148
2170
  """Add a comment to a Linear issue.
1149
2171
 
1150
2172
  Args:
2173
+ ----
1151
2174
  comment: Comment to add
1152
2175
 
1153
2176
  Returns:
2177
+ -------
1154
2178
  Created comment with ID
1155
2179
 
1156
2180
  """
@@ -1218,11 +2242,13 @@ class LinearAdapter(BaseAdapter[Task]):
1218
2242
  """Get comments for a Linear issue.
1219
2243
 
1220
2244
  Args:
2245
+ ----
1221
2246
  ticket_id: Linear issue identifier
1222
2247
  limit: Maximum number of comments to return
1223
2248
  offset: Number of comments to skip
1224
2249
 
1225
2250
  Returns:
2251
+ -------
1226
2252
  List of comments for the issue
1227
2253
 
1228
2254
  """
@@ -1272,6 +2298,7 @@ class LinearAdapter(BaseAdapter[Task]):
1272
2298
  """List all labels available in the Linear team.
1273
2299
 
1274
2300
  Returns:
2301
+ -------
1275
2302
  List of label dictionaries with 'id', 'name', and 'color' fields
1276
2303
 
1277
2304
  """
@@ -1303,13 +2330,16 @@ class LinearAdapter(BaseAdapter[Task]):
1303
2330
  3. Return the asset URL for use in attachments
1304
2331
 
1305
2332
  Args:
2333
+ ----
1306
2334
  file_path: Path to the file to upload
1307
2335
  mime_type: MIME type of the file. If None, will be auto-detected.
1308
2336
 
1309
2337
  Returns:
2338
+ -------
1310
2339
  Asset URL that can be used with attachmentCreate mutation
1311
2340
 
1312
2341
  Raises:
2342
+ ------
1313
2343
  ValueError: If file doesn't exist, upload fails, or httpx not available
1314
2344
  FileNotFoundError: If the specified file doesn't exist
1315
2345
 
@@ -1418,6 +2448,7 @@ class LinearAdapter(BaseAdapter[Task]):
1418
2448
  accessible URL.
1419
2449
 
1420
2450
  Args:
2451
+ ----
1421
2452
  issue_id: Linear issue identifier (e.g., "ENG-842") or UUID
1422
2453
  file_url: URL of the file (from upload_file() or external URL)
1423
2454
  title: Title for the attachment
@@ -1425,9 +2456,11 @@ class LinearAdapter(BaseAdapter[Task]):
1425
2456
  comment_body: Optional comment text to include with the attachment
1426
2457
 
1427
2458
  Returns:
2459
+ -------
1428
2460
  Dictionary with attachment details including id, title, url, etc.
1429
2461
 
1430
2462
  Raises:
2463
+ ------
1431
2464
  ValueError: If attachment creation fails or issue not found
1432
2465
 
1433
2466
  """
@@ -1497,15 +2530,18 @@ class LinearAdapter(BaseAdapter[Task]):
1497
2530
  accessible URL.
1498
2531
 
1499
2532
  Args:
2533
+ ----
1500
2534
  epic_id: Linear project UUID or slug-shortid
1501
2535
  file_url: URL of the file (from upload_file() or external URL)
1502
2536
  title: Title for the attachment
1503
2537
  subtitle: Optional subtitle for the attachment
1504
2538
 
1505
2539
  Returns:
2540
+ -------
1506
2541
  Dictionary with attachment details including id, title, url, etc.
1507
2542
 
1508
2543
  Raises:
2544
+ ------
1509
2545
  ValueError: If attachment creation fails or project not found
1510
2546
 
1511
2547
  """
@@ -1561,6 +2597,653 @@ class LinearAdapter(BaseAdapter[Task]):
1561
2597
  f"Failed to attach file to project '{epic_id}': {e}"
1562
2598
  ) from e
1563
2599
 
2600
+ async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
2601
+ """Get all attachments for a Linear issue or project.
2602
+
2603
+ This method retrieves attachment metadata from Linear's GraphQL API.
2604
+ Note that Linear attachment URLs require authentication to access.
2605
+
2606
+ Args:
2607
+ ----
2608
+ ticket_id: Linear issue identifier (e.g., "ENG-842") or project UUID
2609
+
2610
+ Returns:
2611
+ -------
2612
+ List of Attachment objects with metadata
2613
+
2614
+ Raises:
2615
+ ------
2616
+ ValueError: If credentials are invalid
2617
+
2618
+ Authentication Note:
2619
+ -------------------
2620
+ Linear attachment URLs require authentication headers:
2621
+ Authorization: Bearer {api_key}
2622
+
2623
+ URLs are in format: https://files.linear.app/workspace/attachment-id/filename
2624
+ Direct access without authentication will return 401 Unauthorized.
2625
+
2626
+ """
2627
+ logger = logging.getLogger(__name__)
2628
+
2629
+ # Validate credentials
2630
+ is_valid, error_message = self.validate_credentials()
2631
+ if not is_valid:
2632
+ raise ValueError(error_message)
2633
+
2634
+ # Try as issue first (most common case)
2635
+ issue_uuid = await self._resolve_issue_id(ticket_id)
2636
+
2637
+ if issue_uuid:
2638
+ # Query issue attachments
2639
+ query = """
2640
+ query GetIssueAttachments($issueId: String!) {
2641
+ issue(id: $issueId) {
2642
+ id
2643
+ identifier
2644
+ attachments {
2645
+ nodes {
2646
+ id
2647
+ title
2648
+ url
2649
+ subtitle
2650
+ metadata
2651
+ createdAt
2652
+ updatedAt
2653
+ }
2654
+ }
2655
+ }
2656
+ }
2657
+ """
2658
+
2659
+ try:
2660
+ result = await self.client.execute_query(query, {"issueId": issue_uuid})
2661
+
2662
+ if not result.get("issue"):
2663
+ logger.warning(f"Issue {ticket_id} not found")
2664
+ return []
2665
+
2666
+ attachments_data = (
2667
+ result["issue"].get("attachments", {}).get("nodes", [])
2668
+ )
2669
+
2670
+ # Map to Attachment objects using identifier (not UUID)
2671
+ return [
2672
+ map_linear_attachment_to_attachment(att, ticket_id)
2673
+ for att in attachments_data
2674
+ ]
2675
+
2676
+ except Exception as e:
2677
+ logger.error(f"Failed to get attachments for issue {ticket_id}: {e}")
2678
+ return []
2679
+
2680
+ # Try as project if not an issue
2681
+ project_uuid = await self._resolve_project_id(ticket_id)
2682
+
2683
+ if project_uuid:
2684
+ # Query project attachments (documents)
2685
+ query = """
2686
+ query GetProjectAttachments($projectId: String!) {
2687
+ project(id: $projectId) {
2688
+ id
2689
+ name
2690
+ documents {
2691
+ nodes {
2692
+ id
2693
+ title
2694
+ url
2695
+ createdAt
2696
+ updatedAt
2697
+ }
2698
+ }
2699
+ }
2700
+ }
2701
+ """
2702
+
2703
+ try:
2704
+ result = await self.client.execute_query(
2705
+ query, {"projectId": project_uuid}
2706
+ )
2707
+
2708
+ if not result.get("project"):
2709
+ logger.warning(f"Project {ticket_id} not found")
2710
+ return []
2711
+
2712
+ documents_data = result["project"].get("documents", {}).get("nodes", [])
2713
+
2714
+ # Map documents to Attachment objects
2715
+ return [
2716
+ map_linear_attachment_to_attachment(doc, ticket_id)
2717
+ for doc in documents_data
2718
+ ]
2719
+
2720
+ except Exception as e:
2721
+ logger.error(f"Failed to get attachments for project {ticket_id}: {e}")
2722
+ return []
2723
+
2724
+ # Not found as either issue or project
2725
+ logger.warning(f"Ticket {ticket_id} not found as issue or project")
2726
+ return []
2727
+
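Because the URLs returned by get_attachments() require authentication (see the note in the docstring above), downloading them needs the API key passed as a Bearer token. A sketch using httpx, which this adapter already relies on for uploads; the key and destination path are illustrative:

# Sketch: fetching an attachment URL returned by get_attachments().
import httpx

async def download_attachment(url: str, api_key: str, dest: str) -> None:
    headers = {"Authorization": f"Bearer {api_key}"}
    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.get(url, headers=headers)
        response.raise_for_status()  # 401 here usually means a missing/invalid token
        with open(dest, "wb") as fh:
            fh.write(response.content)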
2728
+ async def list_cycles(
2729
+ self, team_id: str | None = None, limit: int = 50
2730
+ ) -> builtins.list[dict[str, Any]]:
2731
+ """List Linear Cycles (Sprints) for the team.
2732
+
2733
+ Args:
2734
+ ----
2735
+ team_id: Linear team UUID. If None, uses the configured team.
2736
+ limit: Maximum number of cycles to return (default: 50)
2737
+
2738
+ Returns:
2739
+ -------
2740
+ List of cycle dictionaries with fields:
2741
+ - id: Cycle UUID
2742
+ - name: Cycle name
2743
+ - number: Cycle number
2744
+ - startsAt: Start date (ISO format)
2745
+ - endsAt: End date (ISO format)
2746
+ - completedAt: Completion date (ISO format, None if not completed)
2747
+ - progress: Completion progress as a fraction from 0 to 1
2748
+
2749
+ Raises:
2750
+ ------
2751
+ ValueError: If credentials are invalid or query fails
2752
+
2753
+ """
2754
+ # Validate credentials
2755
+ is_valid, error_message = self.validate_credentials()
2756
+ if not is_valid:
2757
+ raise ValueError(error_message)
2758
+
2759
+ await self.initialize()
2760
+
2761
+ # Use configured team if not specified
2762
+ if team_id is None:
2763
+ team_id = await self._ensure_team_id()
2764
+
2765
+ try:
2766
+ # Fetch all cycles with pagination
2767
+ all_cycles: list[dict[str, Any]] = []
2768
+ has_next_page = True
2769
+ after_cursor = None
2770
+
2771
+ while has_next_page and len(all_cycles) < limit:
2772
+ # Calculate remaining items needed
2773
+ remaining = limit - len(all_cycles)
2774
+ page_size = min(remaining, 50) # Linear max page size is typically 50
2775
+
2776
+ variables = {"teamId": team_id, "first": page_size}
2777
+ if after_cursor:
2778
+ variables["after"] = after_cursor
2779
+
2780
+ result = await self.client.execute_query(LIST_CYCLES_QUERY, variables)
2781
+
2782
+ cycles_data = result.get("team", {}).get("cycles", {})
2783
+ page_cycles = cycles_data.get("nodes", [])
2784
+ page_info = cycles_data.get("pageInfo", {})
2785
+
2786
+ all_cycles.extend(page_cycles)
2787
+ has_next_page = page_info.get("hasNextPage", False)
2788
+ after_cursor = page_info.get("endCursor")
2789
+
2790
+ return all_cycles[:limit] # Ensure we don't exceed limit
2791
+
2792
+ except Exception as e:
2793
+ raise ValueError(f"Failed to list Linear cycles: {e}") from e
2794
+
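A usage sketch that prints a one-line summary per cycle, based on the fields documented above (name, number, completedAt, progress):

# Sketch: summarizing the configured team's cycles.
async def print_cycles(adapter):
    for cycle in await adapter.list_cycles(limit=10):
        status = "done" if cycle.get("completedAt") else f"{cycle.get('progress', 0):.0%}"
        print(f"Cycle {cycle.get('number')} - {cycle.get('name')}: {status}")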
2795
+ async def get_issue_status(self, issue_id: str) -> dict[str, Any] | None:
2796
+ """Get rich issue status information for a Linear issue.
2797
+
2798
+ Args:
2799
+ ----
2800
+ issue_id: Linear issue identifier (e.g., 'BTA-123') or UUID
2801
+
2802
+ Returns:
2803
+ -------
2804
+ Dictionary with workflow state details:
2805
+ - id: State UUID
2806
+ - name: State name (e.g., "In Progress")
2807
+ - type: State type (e.g., "started", "completed")
2808
+ - color: State color (hex format)
2809
+ - description: State description
2810
+ - position: Position in workflow
2811
+ Returns None if issue not found.
2812
+
2813
+ Raises:
2814
+ ------
2815
+ ValueError: If credentials are invalid or query fails
2816
+
2817
+ """
2818
+ # Validate credentials
2819
+ is_valid, error_message = self.validate_credentials()
2820
+ if not is_valid:
2821
+ raise ValueError(error_message)
2822
+
2823
+ await self.initialize()
2824
+
2825
+ # Resolve issue identifier to UUID if needed
2826
+ issue_uuid = await self._resolve_issue_id(issue_id)
2827
+ if not issue_uuid:
2828
+ return None
2829
+
2830
+ try:
2831
+ result = await self.client.execute_query(
2832
+ GET_ISSUE_STATUS_QUERY, {"issueId": issue_uuid}
2833
+ )
2834
+
2835
+ issue_data = result.get("issue")
2836
+ if not issue_data:
2837
+ return None
2838
+
2839
+ return issue_data.get("state")
2840
+
2841
+ except Exception as e:
2842
+ raise ValueError(f"Failed to get issue status for '{issue_id}': {e}") from e
2843
+
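A quick usage sketch, reusing the illustrative "BTA-123" identifier from the docstring:

# Sketch: printing the raw workflow state of an issue.
async def show_state(adapter):
    state = await adapter.get_issue_status("BTA-123")
    if state is None:
        print("Issue not found")
    else:
        print(f"{state['name']} (type={state['type']}, position={state['position']})")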
2844
+ async def list_issue_statuses(
2845
+ self, team_id: str | None = None
2846
+ ) -> builtins.list[dict[str, Any]]:
2847
+ """List all workflow states for the team.
2848
+
2849
+ Args:
2850
+ ----
2851
+ team_id: Linear team UUID. If None, uses the configured team.
2852
+
2853
+ Returns:
2854
+ -------
2855
+ List of workflow state dictionaries with fields:
2856
+ - id: State UUID
2857
+ - name: State name (e.g., "Backlog", "In Progress", "Done")
2858
+ - type: State type (e.g., "backlog", "unstarted", "started", "completed", "canceled")
2859
+ - color: State color (hex format)
2860
+ - description: State description
2861
+ - position: Position in workflow (lower = earlier)
2862
+
2863
+ Raises:
2864
+ ------
2865
+ ValueError: If credentials are invalid or query fails
2866
+
2867
+ """
2868
+ # Validate credentials
2869
+ is_valid, error_message = self.validate_credentials()
2870
+ if not is_valid:
2871
+ raise ValueError(error_message)
2872
+
2873
+ await self.initialize()
2874
+
2875
+ # Use configured team if not specified
2876
+ if team_id is None:
2877
+ team_id = await self._ensure_team_id()
2878
+
2879
+ try:
2880
+ result = await self.client.execute_query(
2881
+ LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
2882
+ )
2883
+
2884
+ states_data = result.get("team", {}).get("states", {})
2885
+ states = states_data.get("nodes", [])
2886
+
2887
+ # Sort by position to maintain workflow order
2888
+ states.sort(key=lambda s: s.get("position", 0))
2889
+
2890
+ return states
2891
+
2892
+ except Exception as e:
2893
+ raise ValueError(f"Failed to list workflow states: {e}") from e
2894
+
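Since list_issue_statuses() returns states sorted by workflow position, a name-to-UUID lookup falls out naturally; a small sketch:

# Sketch: building a name -> UUID map of the team's workflow states.
async def state_ids_by_name(adapter):
    return {state["name"]: state["id"] for state in await adapter.list_issue_statuses()}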
2895
+ async def list_epics(
2896
+ self,
2897
+ limit: int = 50,
2898
+ offset: int = 0,
2899
+ state: str | None = None,
2900
+ include_completed: bool = True,
2901
+ **kwargs: Any,
2902
+ ) -> builtins.list[Epic]:
2903
+ """List Linear projects (epics) with efficient pagination.
2904
+
2905
+ Args:
2906
+ ----
2907
+ limit: Maximum number of projects to return (default: 50)
2908
+ offset: Number of projects to skip (note: Linear uses cursor-based pagination)
2909
+ state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
2910
+ include_completed: Whether to include completed projects (default: True)
2911
+ **kwargs: Additional filter parameters (reserved for future use)
2912
+
2913
+ Returns:
2914
+ -------
2915
+ List of Epic objects mapped from Linear projects
2916
+
2917
+ Raises:
2918
+ ------
2919
+ ValueError: If credentials are invalid or query fails
2920
+
2921
+ """
2922
+ # Validate credentials
2923
+ is_valid, error_message = self.validate_credentials()
2924
+ if not is_valid:
2925
+ raise ValueError(error_message)
2926
+
2927
+ await self.initialize()
2928
+ team_id = await self._ensure_team_id()
2929
+
2930
+ # Build project filter using existing helper
2931
+ from .types import build_project_filter
2932
+
2933
+ project_filter = build_project_filter(
2934
+ state=state,
2935
+ team_id=team_id,
2936
+ include_completed=include_completed,
2937
+ )
2938
+
2939
+ try:
2940
+ # Fetch projects with pagination
2941
+ all_projects = []
2942
+ has_next_page = True
2943
+ after_cursor = None
2944
+ projects_fetched = 0
2945
+
2946
+ while has_next_page and projects_fetched < limit + offset:
2947
+ # Calculate how many more we need
2948
+ remaining = (limit + offset) - projects_fetched
2949
+ page_size = min(remaining, 50) # Linear max page size is typically 50
2950
+
2951
+ variables = {"filter": project_filter, "first": page_size}
2952
+ if after_cursor:
2953
+ variables["after"] = after_cursor
2954
+
2955
+ result = await self.client.execute_query(LIST_PROJECTS_QUERY, variables)
2956
+
2957
+ projects_data = result.get("projects", {})
2958
+ page_projects = projects_data.get("nodes", [])
2959
+ page_info = projects_data.get("pageInfo", {})
2960
+
2961
+ all_projects.extend(page_projects)
2962
+ projects_fetched += len(page_projects)
2963
+
2964
+ has_next_page = page_info.get("hasNextPage", False)
2965
+ after_cursor = page_info.get("endCursor")
2966
+
2967
+ # Stop if no more results on this page
2968
+ if not page_projects:
2969
+ break
2970
+
2971
+ # Apply offset and limit
2972
+ paginated_projects = all_projects[offset : offset + limit]
2973
+
2974
+ # Map Linear projects to Epic objects using existing mapper
2975
+ epics = []
2976
+ for project in paginated_projects:
2977
+ epics.append(map_linear_project_to_epic(project))
2978
+
2979
+ return epics
2980
+
2981
+ except Exception as e:
2982
+ raise ValueError(f"Failed to list Linear projects: {e}") from e
2983
+
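A usage sketch of list_epics() narrowed to in-flight projects; "started" is one of the example state values given in the docstring:

# Sketch: fetching only active (non-completed) projects as Epic objects.
async def active_epics(adapter):
    return await adapter.list_epics(limit=20, state="started", include_completed=False)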
2984
+ def _linear_update_to_model(self, linear_data: dict[str, Any]) -> ProjectUpdate:
2985
+ """Convert Linear GraphQL response to ProjectUpdate model (1M-238).
2986
+
2987
+ Maps Linear's ProjectUpdate entity fields to the universal ProjectUpdate model,
2988
+ handling health value transformations and optional fields.
2989
+
2990
+ Args:
2991
+ ----
2992
+ linear_data: GraphQL response data for a ProjectUpdate entity
2993
+
2994
+ Returns:
2995
+ -------
2996
+ ProjectUpdate instance with mapped fields
2997
+
2998
+ Linear Health Mapping:
2999
+ ---------------------
3000
+ Linear uses camelCase enum values: onTrack, atRisk, offTrack
3001
+ Universal model uses UPPER_SNAKE_CASE: ON_TRACK, AT_RISK, OFF_TRACK
3002
+
3003
+ """
3004
+ # Map Linear health values (camelCase) to universal enum (UPPER_SNAKE_CASE)
3005
+ health_mapping = {
3006
+ "onTrack": ProjectUpdateHealth.ON_TRACK,
3007
+ "atRisk": ProjectUpdateHealth.AT_RISK,
3008
+ "offTrack": ProjectUpdateHealth.OFF_TRACK,
3009
+ }
3010
+
3011
+ health_value = linear_data.get("health")
3012
+ health = health_mapping.get(health_value) if health_value else None
3013
+
3014
+ # Extract user info
3015
+ user_data = linear_data.get("user", {})
3016
+ author_id = user_data.get("id") if user_data else None
3017
+ author_name = user_data.get("name") if user_data else None
3018
+
3019
+ # Extract project info
3020
+ project_data = linear_data.get("project", {})
3021
+ project_id = project_data.get("id", "")
3022
+ project_name = project_data.get("name")
3023
+
3024
+ # Parse timestamps
3025
+ created_at = datetime.fromisoformat(
3026
+ linear_data["createdAt"].replace("Z", "+00:00")
3027
+ )
3028
+ updated_at = None
3029
+ if linear_data.get("updatedAt"):
3030
+ updated_at = datetime.fromisoformat(
3031
+ linear_data["updatedAt"].replace("Z", "+00:00")
3032
+ )
3033
+
3034
+ return ProjectUpdate(
3035
+ id=linear_data["id"],
3036
+ project_id=project_id,
3037
+ project_name=project_name,
3038
+ body=linear_data["body"],
3039
+ health=health,
3040
+ created_at=created_at,
3041
+ updated_at=updated_at,
3042
+ author_id=author_id,
3043
+ author_name=author_name,
3044
+ url=linear_data.get("url"),
3045
+ diff_markdown=linear_data.get("diffMarkdown"),
3046
+ )
3047
+
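For reference, an illustrative (entirely made-up) payload of the shape _linear_update_to_model() consumes, which can be handy when unit-testing the health and timestamp translation:

# Sketch: a fabricated ProjectUpdate payload exercising the mapping above.
sample_update = {
    "id": "00000000-0000-0000-0000-000000000000",
    "body": "Weekly status update",
    "health": "atRisk",  # mapped to ProjectUpdateHealth.AT_RISK
    "createdAt": "2024-01-15T10:00:00.000Z",
    "updatedAt": None,
    "url": None,
    "diffMarkdown": None,
    "user": {"id": "user-uuid", "name": "Example User"},
    "project": {"id": "project-uuid", "name": "Example Project"},
}
# adapter._linear_update_to_model(sample_update) would return a ProjectUpdate
# with health=ProjectUpdateHealth.AT_RISK and a timezone-aware created_at.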
3048
+ async def create_project_update(
3049
+ self,
3050
+ project_id: str,
3051
+ body: str,
3052
+ health: ProjectUpdateHealth | None = None,
3053
+ ) -> ProjectUpdate:
3054
+ """Create a project status update in Linear (1M-238).
3055
+
3056
+ Creates a new status update for a Linear project with optional health indicator.
3057
+ Linear will automatically generate a diff showing changes since the last update.
3058
+
3059
+ Args:
3060
+ ----
3061
+ project_id: Linear project UUID, slugId, or short ID
3062
+ body: Markdown-formatted update content (required)
3063
+ health: Optional health status (ON_TRACK, AT_RISK, OFF_TRACK)
3064
+
3065
+ Returns:
3066
+ -------
3067
+ Created ProjectUpdate with Linear metadata including auto-generated diff
3068
+
3069
+ Raises:
3070
+ ------
3071
+ ValueError: If credentials invalid, project not found, or creation fails
3072
+
3073
+ Example:
3074
+ -------
3075
+ >>> update = await adapter.create_project_update(
3076
+ ... project_id="PROJ-123",
3077
+ ... body="Sprint 23 completed. 15/20 stories done.",
3078
+ ... health=ProjectUpdateHealth.AT_RISK
3079
+ ... )
3080
+
3081
+ """
3082
+ logger = logging.getLogger(__name__)
3083
+
3084
+ # Validate credentials
3085
+ is_valid, error_message = self.validate_credentials()
3086
+ if not is_valid:
3087
+ raise ValueError(error_message)
3088
+
3089
+ await self.initialize()
3090
+
3091
+ # Resolve project identifier to UUID if needed
3092
+ project_uuid = await self._resolve_project_id(project_id)
3093
+ if not project_uuid:
3094
+ raise ValueError(f"Project '{project_id}' not found")
3095
+
3096
+ # Build mutation variables
3097
+ variables: dict[str, Any] = {
3098
+ "projectId": project_uuid,
3099
+ "body": body,
3100
+ }
3101
+
3102
+ # Map health enum to Linear's camelCase format
3103
+ if health:
3104
+ health_mapping = {
3105
+ ProjectUpdateHealth.ON_TRACK: "onTrack",
3106
+ ProjectUpdateHealth.AT_RISK: "atRisk",
3107
+ ProjectUpdateHealth.OFF_TRACK: "offTrack",
3108
+ }
3109
+ variables["health"] = health_mapping.get(health)
3110
+
3111
+ try:
3112
+ result = await self.client.execute_mutation(
3113
+ CREATE_PROJECT_UPDATE_MUTATION, variables
3114
+ )
3115
+
3116
+ if not result["projectUpdateCreate"]["success"]:
3117
+ raise ValueError(f"Failed to create project update for '{project_id}'")
3118
+
3119
+ update_data = result["projectUpdateCreate"]["projectUpdate"]
3120
+ logger.info(
3121
+ f"Created project update for project '{project_id}' (UUID: {project_uuid})"
3122
+ )
3123
+
3124
+ return self._linear_update_to_model(update_data)
3125
+
3126
+ except Exception as e:
3127
+ raise ValueError(
3128
+ f"Failed to create project update for '{project_id}': {e}"
3129
+ ) from e
3130
+
3131
+ async def list_project_updates(
3132
+ self,
3133
+ project_id: str,
3134
+ limit: int = 10,
3135
+ ) -> list[ProjectUpdate]:
3136
+ """List project updates for a project (1M-238).
3137
+
3138
+ Retrieves recent status updates for a Linear project, ordered by creation date.
3139
+
3140
+ Args:
3141
+ ----
3142
+ project_id: Linear project UUID, slugId, or short ID
3143
+ limit: Maximum number of updates to return (default: 10, max: 250)
3144
+
3145
+ Returns:
3146
+ -------
3147
+ List of ProjectUpdate objects ordered by creation date (newest first)
3148
+
3149
+ Raises:
3150
+ ------
3151
+ ValueError: If credentials invalid or query fails
3152
+
3153
+ Example:
3154
+ -------
3155
+ >>> updates = await adapter.list_project_updates("PROJ-123", limit=5)
3156
+ >>> for update in updates:
3157
+ ... print(f"{update.created_at}: {update.health} - {update.body[:50]}")
3158
+
3159
+ """
3160
+ logger = logging.getLogger(__name__)
3161
+
3162
+ # Validate credentials
3163
+ is_valid, error_message = self.validate_credentials()
3164
+ if not is_valid:
3165
+ raise ValueError(error_message)
3166
+
3167
+ await self.initialize()
3168
+
3169
+ # Resolve project identifier to UUID if needed
3170
+ project_uuid = await self._resolve_project_id(project_id)
3171
+ if not project_uuid:
3172
+ raise ValueError(f"Project '{project_id}' not found")
3173
+
3174
+ try:
3175
+ result = await self.client.execute_query(
3176
+ LIST_PROJECT_UPDATES_QUERY,
3177
+ {"projectId": project_uuid, "first": min(limit, 250)},
3178
+ )
3179
+
3180
+ project_data = result.get("project")
3181
+ if not project_data:
3182
+ raise ValueError(f"Project '{project_id}' not found")
3183
+
3184
+ updates_data = project_data.get("projectUpdates", {}).get("nodes", [])
3185
+
3186
+ # Map Linear updates to ProjectUpdate models
3187
+ return [self._linear_update_to_model(update) for update in updates_data]
3188
+
3189
+ except Exception as e:
3190
+ logger.warning(f"Failed to list project updates for {project_id}: {e}")
3191
+ raise ValueError(
3192
+ f"Failed to list project updates for '{project_id}': {e}"
3193
+ ) from e
3194
+
3195
+ async def get_project_update(
3196
+ self,
3197
+ update_id: str,
3198
+ ) -> ProjectUpdate:
3199
+ """Get a specific project update by ID (1M-238).
3200
+
3201
+ Retrieves detailed information about a single project status update.
3202
+
3203
+ Args:
3204
+ ----
3205
+ update_id: Linear ProjectUpdate UUID
3206
+
3207
+ Returns:
3208
+ -------
3209
+ ProjectUpdate object with full details
3210
+
3211
+ Raises:
3212
+ ------
3213
+ ValueError: If credentials invalid, update not found, or query fails
3214
+
3215
+ Example:
3216
+ -------
3217
+ >>> update = await adapter.get_project_update("update-uuid-here")
3218
+ >>> print(f"Update: {update.body}")
3219
+ >>> print(f"Health: {update.health}")
3220
+ >>> print(f"Diff: {update.diff_markdown}")
3221
+
3222
+ """
3223
+ logger = logging.getLogger(__name__)
3224
+
3225
+ # Validate credentials
3226
+ is_valid, error_message = self.validate_credentials()
3227
+ if not is_valid:
3228
+ raise ValueError(error_message)
3229
+
3230
+ await self.initialize()
3231
+
3232
+ try:
3233
+ result = await self.client.execute_query(
3234
+ GET_PROJECT_UPDATE_QUERY, {"id": update_id}
3235
+ )
3236
+
3237
+ update_data = result.get("projectUpdate")
3238
+ if not update_data:
3239
+ raise ValueError(f"Project update '{update_id}' not found")
3240
+
3241
+ return self._linear_update_to_model(update_data)
3242
+
3243
+ except Exception as e:
3244
+ logger.error(f"Failed to get project update {update_id}: {e}")
3245
+ raise ValueError(f"Failed to get project update '{update_id}': {e}") from e
3246
+
1564
3247
  async def close(self) -> None:
1565
3248
  """Close the adapter and clean up resources."""
1566
3249
  await self.client.close()