mcp-ticketer 0.3.0__py3-none-any.whl → 2.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +3 -3
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/__init__.py +2 -0
- mcp_ticketer/adapters/aitrackdown.py +930 -52
- mcp_ticketer/adapters/asana/__init__.py +15 -0
- mcp_ticketer/adapters/asana/adapter.py +1537 -0
- mcp_ticketer/adapters/asana/client.py +292 -0
- mcp_ticketer/adapters/asana/mappers.py +348 -0
- mcp_ticketer/adapters/asana/types.py +146 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +58 -16
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/__init__.py +1 -1
- mcp_ticketer/adapters/linear/adapter.py +3810 -462
- mcp_ticketer/adapters/linear/client.py +312 -69
- mcp_ticketer/adapters/linear/mappers.py +305 -85
- mcp_ticketer/adapters/linear/queries.py +317 -17
- mcp_ticketer/adapters/linear/types.py +187 -64
- mcp_ticketer/adapters/linear.py +2 -2
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cache/memory.py +9 -8
- mcp_ticketer/cli/adapter_diagnostics.py +91 -54
- mcp_ticketer/cli/auggie_configure.py +116 -15
- mcp_ticketer/cli/codex_configure.py +274 -82
- mcp_ticketer/cli/configure.py +1323 -151
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +209 -114
- mcp_ticketer/cli/discover.py +297 -26
- mcp_ticketer/cli/gemini_configure.py +119 -26
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +435 -0
- mcp_ticketer/cli/linear_commands.py +256 -130
- mcp_ticketer/cli/main.py +140 -1544
- mcp_ticketer/cli/mcp_configure.py +1013 -100
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/migrate_config.py +12 -8
- mcp_ticketer/cli/platform_commands.py +123 -0
- mcp_ticketer/cli/platform_detection.py +477 -0
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/python_detection.py +126 -0
- mcp_ticketer/cli/queue_commands.py +15 -15
- mcp_ticketer/cli/setup_command.py +794 -0
- mcp_ticketer/cli/simple_health.py +84 -59
- mcp_ticketer/cli/ticket_commands.py +1375 -0
- mcp_ticketer/cli/update_checker.py +313 -0
- mcp_ticketer/cli/utils.py +195 -72
- mcp_ticketer/core/__init__.py +64 -1
- mcp_ticketer/core/adapter.py +618 -18
- mcp_ticketer/core/config.py +77 -68
- mcp_ticketer/core/env_discovery.py +75 -16
- mcp_ticketer/core/env_loader.py +121 -97
- mcp_ticketer/core/exceptions.py +32 -24
- mcp_ticketer/core/http_client.py +26 -26
- mcp_ticketer/core/instructions.py +405 -0
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +42 -30
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +566 -19
- mcp_ticketer/core/onepassword_secrets.py +379 -0
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +189 -49
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/registry.py +3 -3
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +592 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/defaults/ticket_instructions.md +644 -0
- mcp_ticketer/mcp/__init__.py +29 -1
- mcp_ticketer/mcp/__main__.py +60 -0
- mcp_ticketer/mcp/server/__init__.py +25 -0
- mcp_ticketer/mcp/server/__main__.py +60 -0
- mcp_ticketer/mcp/server/constants.py +58 -0
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/dto.py +195 -0
- mcp_ticketer/mcp/server/main.py +1343 -0
- mcp_ticketer/mcp/server/response_builder.py +206 -0
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +151 -0
- mcp_ticketer/mcp/server/tools/__init__.py +69 -0
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +224 -0
- mcp_ticketer/mcp/server/tools/bulk_tools.py +330 -0
- mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
- mcp_ticketer/mcp/server/tools/config_tools.py +1564 -0
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/instruction_tools.py +295 -0
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +150 -0
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +318 -0
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1413 -0
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +364 -0
- mcp_ticketer/queue/__init__.py +1 -0
- mcp_ticketer/queue/health_monitor.py +168 -136
- mcp_ticketer/queue/manager.py +78 -63
- mcp_ticketer/queue/queue.py +108 -21
- mcp_ticketer/queue/run_worker.py +2 -2
- mcp_ticketer/queue/ticket_registry.py +213 -155
- mcp_ticketer/queue/worker.py +96 -58
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.9.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.9.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.9.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1354
- mcp_ticketer/adapters/jira.py +0 -1011
- mcp_ticketer/mcp/server.py +0 -2030
- mcp_ticketer-0.3.0.dist-info/METADATA +0 -414
- mcp_ticketer-0.3.0.dist-info/RECORD +0 -59
- mcp_ticketer-0.3.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.3.0.dist-info → mcp_ticketer-2.2.9.dist-info}/licenses/LICENSE +0 -0
@@ -3,44 +3,67 @@
 from __future__ import annotations
 
 import asyncio
+import logging
+import mimetypes
 import os
 from datetime import datetime
-from
+from pathlib import Path
+from typing import Any
 
 try:
+    import httpx
     from gql import gql
     from gql.transport.exceptions import TransportQueryError
 except ImportError:
     gql = None
     TransportQueryError = Exception
+    httpx = None
 
+import builtins
+
+from ...cache.memory import MemoryCache
 from ...core.adapter import BaseAdapter
 from ...core.models import (
+    Attachment,
     Comment,
     Epic,
-
+    Milestone,
+    ProjectUpdate,
+    ProjectUpdateHealth,
     SearchQuery,
     Task,
     TicketState,
-    TicketType,
 )
 from ...core.registry import AdapterRegistry
-
+from ...core.url_parser import URLParserError, normalize_project_id
 from .client import LinearGraphQLClient
 from .mappers import (
     build_linear_issue_input,
     build_linear_issue_update_input,
-
+    map_linear_attachment_to_attachment,
     map_linear_comment_to_comment,
     map_linear_issue_to_task,
     map_linear_project_to_epic,
 )
 from .queries import (
     ALL_FRAGMENTS,
+    ARCHIVE_CYCLE_MUTATION,
+    CREATE_CYCLE_MUTATION,
     CREATE_ISSUE_MUTATION,
-
+    CREATE_LABEL_MUTATION,
+    CREATE_PROJECT_UPDATE_MUTATION,
+    GET_CUSTOM_VIEW_QUERY,
+    GET_CYCLE_ISSUES_QUERY,
+    GET_CYCLE_QUERY,
+    GET_ISSUE_STATUS_QUERY,
+    GET_PROJECT_UPDATE_QUERY,
+    LIST_CYCLES_QUERY,
+    LIST_ISSUE_STATUSES_QUERY,
     LIST_ISSUES_QUERY,
+    LIST_PROJECT_UPDATES_QUERY,
+    LIST_PROJECTS_QUERY,
     SEARCH_ISSUES_QUERY,
+    UPDATE_CYCLE_MUTATION,
     UPDATE_ISSUE_MUTATION,
     WORKFLOW_STATES_QUERY,
 )
@@ -49,22 +72,21 @@ from .types import (
     build_issue_filter,
     get_linear_priority,
     get_linear_state_type,
-    get_universal_state,
 )
 
 
 class LinearAdapter(BaseAdapter[Task]):
     """Adapter for Linear issue tracking system using native GraphQL API.
-
+
     This adapter provides comprehensive integration with Linear's GraphQL API,
     supporting all major ticket management operations including:
-
+
     - CRUD operations for issues and projects
     - State transitions and workflow management
     - User assignment and search functionality
     - Comment management
     - Epic/Issue/Task hierarchy support
-
+
     The adapter is organized into multiple modules for better maintainability:
     - client.py: GraphQL client management
     - queries.py: GraphQL queries and fragments
@@ -72,312 +94,743 @@ class LinearAdapter(BaseAdapter[Task]):
     - mappers.py: Data transformation logic
     """
 
-    def __init__(self, config:
+    def __init__(self, config: dict[str, Any]):
         """Initialize Linear adapter.
 
         Args:
+        ----
             config: Configuration with:
                 - api_key: Linear API key (or LINEAR_API_KEY env var)
                 - workspace: Linear workspace name (optional, for documentation)
                 - team_key: Linear team key (e.g., 'BTA') OR
                 - team_id: Linear team UUID (e.g., '02d15669-7351-4451-9719-807576c16049')
                 - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)
+                - labels_ttl: TTL for label cache in seconds (default: 300)
 
         Raises:
+        ------
             ValueError: If required configuration is missing
+
         """
         # Initialize instance variables before calling super().__init__
         # because parent constructor calls _get_state_mapping()
-        self._team_data:
-        self._workflow_states:
-        self.
-        self.
+        self._team_data: dict[str, Any] | None = None
+        self._workflow_states: dict[str, dict[str, Any]] | None = None
+        self._labels_ttl = config.get("labels_ttl", 300.0)  # 5 min default
+        self._labels_cache = MemoryCache(default_ttl=self._labels_ttl)
+        self._users_cache: dict[str, dict[str, Any]] | None = None
         self._initialized = False
 
         super().__init__(config)
-
+
         # Extract configuration
         self.api_key = config.get("api_key") or os.getenv("LINEAR_API_KEY")
         if not self.api_key:
-            raise ValueError(
-
-
-
-
-
+            raise ValueError(
+                "Linear API key is required (api_key or LINEAR_API_KEY env var)"
+            )
+
+        # Clean API key - remove common prefixes if accidentally included in config
+        # (The client will add Bearer back when making requests)
+        if isinstance(self.api_key, str):
+            # Remove Bearer prefix
+            if self.api_key.startswith("Bearer "):
+                self.api_key = self.api_key.replace("Bearer ", "")
+            # Remove environment variable name prefix (e.g., "LINEAR_API_KEY=")
+            if "=" in self.api_key:
+                parts = self.api_key.split("=", 1)
+                if len(parts) == 2 and parts[0].upper() in (
+                    "LINEAR_API_KEY",
+                    "API_KEY",
+                ):
+                    self.api_key = parts[1]
+
+        # Validate API key format (Linear keys start with "lin_api_")
+        if not self.api_key.startswith("lin_api_"):
+            raise ValueError(
+                f"Invalid Linear API key format. Expected key starting with 'lin_api_', "
+                f"got: {self.api_key[:15]}... "
+                f"Please check your configuration and ensure the API key is correct."
+            )
+
         self.workspace = config.get("workspace", "")
         self.team_key = config.get("team_key")
         self.team_id = config.get("team_id")
+        self.user_email = config.get("user_email")  # Optional default assignee
         self.api_url = config.get("api_url", "https://api.linear.app/graphql")
-
+
         # Validate team configuration
         if not self.team_key and not self.team_id:
             raise ValueError("Either team_key or team_id must be provided")
-
-        # Initialize client
-
-        self.client = LinearGraphQLClient(api_key_clean)
+
+        # Initialize client with clean API key
+        self.client = LinearGraphQLClient(self.api_key)
 
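The key-sanitization logic added above (stripping an accidental "Bearer " or env-var-name prefix, then enforcing the lin_api_ format) is easy to exercise on its own. A minimal standalone sketch of the same checks; the helper name is ours, the adapter performs this inline in __init__:

def clean_linear_api_key(raw: str) -> str:
    # Strip an accidental "Bearer " prefix (the client re-adds it per request).
    key = raw.replace("Bearer ", "") if raw.startswith("Bearer ") else raw
    # Strip an accidental env-var name prefix such as "LINEAR_API_KEY=".
    if "=" in key:
        prefix, _, rest = key.partition("=")
        if prefix.upper() in ("LINEAR_API_KEY", "API_KEY"):
            key = rest
    # Linear personal API keys start with "lin_api_".
    if not key.startswith("lin_api_"):
        raise ValueError(f"Invalid Linear API key format: {key[:15]}...")
    return key

assert clean_linear_api_key("Bearer lin_api_abc123") == "lin_api_abc123"
assert clean_linear_api_key("LINEAR_API_KEY=lin_api_abc123") == "lin_api_abc123"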
     def validate_credentials(self) -> tuple[bool, str]:
         """Validate Linear API credentials.
-
+
         Returns:
+        -------
             Tuple of (is_valid, error_message)
+
         """
         if not self.api_key:
             return False, "Linear API key is required"
-
+
         if not self.team_key and not self.team_id:
             return False, "Either team_key or team_id must be provided"
-
+
         return True, ""
 
     async def initialize(self) -> None:
-        """Initialize adapter by preloading team, states, and labels data concurrently.
+        """Initialize adapter by preloading team, states, and labels data concurrently.
+
+        Design Decision: Enhanced Error Handling (1M-431)
+        --------------------------------------------------
+        Improved error messages to provide actionable troubleshooting guidance.
+        Added logging to track initialization progress and identify failure points.
+        Preserves original ValueError type for backward compatibility.
+
+        Raises:
+        ------
+            ValueError: If connection fails or initialization encounters errors
+                with detailed troubleshooting information
+
+        """
         if self._initialized:
             return
-
+
+        import logging
+
+        logger = logging.getLogger(__name__)
+
         try:
             # Test connection first
-
-
-
+            logger.info(
+                f"Testing Linear API connection for team {self.team_key or self.team_id}..."
+            )
+            connection_ok = await self.client.test_connection()
+
+            if not connection_ok:
+                raise ValueError(
+                    "Failed to connect to Linear API. Troubleshooting:\n"
+                    "1. Verify API key is valid (starts with 'lin_api_')\n"
+                    "2. Check team_key matches your Linear workspace\n"
+                    "3. Ensure API key has proper permissions\n"
+                    "4. Review logs for detailed error information\n"
+                    f"   API key preview: {self.api_key[:20] if self.api_key else 'None'}...\n"
+                    f"   Team: {self.team_key or self.team_id}"
+                )
+
+            logger.info("Linear API connection successful")
+
             # Load team data and workflow states concurrently
+            logger.debug("Loading team data and workflow states...")
             team_id = await self._ensure_team_id()
-
-            #
+
+            # Validate team_id before initialization
+            if not team_id:
+                raise ValueError(
+                    "Cannot initialize Linear adapter without team_id. "
+                    "Ensure LINEAR_TEAM_KEY is configured correctly."
+                )
+
+            # Load workflow states and labels for the team
             await self._load_workflow_states(team_id)
-
+            await self._load_team_labels(team_id)
+
             self._initialized = True
-
+            logger.info("Linear adapter initialized successfully")
+
+        except ValueError:
+            # Re-raise ValueError with original message (for connection failures)
+            raise
         except Exception as e:
-
+            logger.error(
+                f"Linear adapter initialization failed: {type(e).__name__}: {e}",
+                exc_info=True,
+            )
+            raise ValueError(
+                f"Failed to initialize Linear adapter: {type(e).__name__}: {e}\n"
+                "Check your credentials and network connection."
+            ) from e
 
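Putting the constructor and initialize() together, typical usage would look roughly like the sketch below. The import path is inferred from the file layout listed at the top of this diff, and the key/team values are placeholders:

import asyncio

from mcp_ticketer.adapters.linear.adapter import LinearAdapter

async def main() -> None:
    adapter = LinearAdapter(
        {
            "api_key": "lin_api_...",  # placeholder; or set LINEAR_API_KEY
            "team_key": "ENG",         # short code, resolved to a UUID on init
            "labels_ttl": 300,         # label-cache TTL in seconds
        }
    )
    # Tests the connection, resolves the team, preloads states and labels.
    await adapter.initialize()

asyncio.run(main())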
     async def _ensure_team_id(self) -> str:
         """Ensure we have a team ID, resolving from team_key if needed.
-
+
+        Validates that team_id is a UUID. If it looks like a team_key,
+        resolves it to the actual UUID.
+
         Returns:
-
-
+        -------
+            Valid Linear team UUID
+
         Raises:
-
+        ------
+            ValueError: If neither team_id nor team_key provided, or resolution fails
+
         """
+        logger = logging.getLogger(__name__)
+
+        # If we have a team_id, validate it's actually a UUID
         if self.team_id:
-
-
+            # Check if it looks like a UUID (36 chars with hyphens)
+            import re
+
+            uuid_pattern = re.compile(
+                r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
+                re.IGNORECASE,
+            )
+
+            if uuid_pattern.match(self.team_id):
+                # Already a valid UUID
+                return str(self.team_id)
+            # Looks like a team_key string - need to resolve it
+            logger.warning(
+                f"team_id '{self.team_id}' is not a UUID - treating as team_key and resolving"
+            )
+            teams = await self._get_team_by_key(self.team_id)
+            if teams and len(teams) > 0:
+                resolved_id = teams[0]["id"]
+                logger.info(
+                    f"Resolved team_key '{self.team_id}' to UUID: {resolved_id}"
+                )
+                # Cache the resolved UUID
+                self.team_id = resolved_id
+                return resolved_id
+            raise ValueError(
+                f"Cannot resolve team_id '{self.team_id}' to a valid Linear team UUID. "
+                f"Please use team_key instead for team short codes like 'ENG'."
+            )
+
+        # No team_id, must have team_key
         if not self.team_key:
-            raise ValueError(
-
+            raise ValueError(
+                "Either team_id (UUID) or team_key (short code) must be provided"
+            )
+
         # Query team by key
+        teams = await self._get_team_by_key(self.team_key)
+
+        if not teams or len(teams) == 0:
+            raise ValueError(f"Team with key '{self.team_key}' not found")
+
+        team = teams[0]
+        team_id = team["id"]
+
+        # Cache the resolved team_id
+        self.team_id = team_id
+        self._team_data = team
+        logger.info(f"Resolved team_key '{self.team_key}' to team_id: {team_id}")
+
+        return team_id
+
+    async def _get_team_by_key(self, team_key: str) -> list[dict[str, Any]]:
+        """Query Linear API to get team by key.
+
+        Args:
+        ----
+            team_key: Short team identifier (e.g., 'ENG', 'BTA')
+
+        Returns:
+        -------
+            List of matching teams
+
+        """
         query = """
             query GetTeamByKey($key: String!) {
                 teams(filter: { key: { eq: $key } }) {
                     nodes {
                         id
-                        name
                         key
-
+                        name
                     }
                 }
             }
         """
-
-        try:
-            result = await self.client.execute_query(query, {"key": self.team_key})
-            teams = result.get("teams", {}).get("nodes", [])
-
-            if not teams:
-                raise ValueError(f"Team with key '{self.team_key}' not found")
-
-            team = teams[0]
-            self.team_id = team["id"]
-            self._team_data = team
-
-            return self.team_id
-
-        except Exception as e:
-            raise ValueError(f"Failed to resolve team '{self.team_key}': {e}")
 
-
-
-
+        result = await self.client.execute_query(query, {"key": team_key})
+
+        if "teams" in result and "nodes" in result["teams"]:
+            return result["teams"]["nodes"]
+
+        return []
+
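The GetTeamByKey query above can also be issued directly against Linear's GraphQL endpoint. A hedged sketch using httpx (which this version imports); the Authorization header scheme is an assumption here, since the adapter's LinearGraphQLClient normally manages auth:

import httpx

TEAM_QUERY = """
query GetTeamByKey($key: String!) {
  teams(filter: { key: { eq: $key } }) { nodes { id key name } }
}
"""

def get_team_by_key(api_key: str, team_key: str) -> list[dict]:
    # Assumes the personal API key is accepted in the Authorization header.
    resp = httpx.post(
        "https://api.linear.app/graphql",
        headers={"Authorization": api_key, "Content-Type": "application/json"},
        json={"query": TEAM_QUERY, "variables": {"key": team_key}},
        timeout=10.0,
    )
    resp.raise_for_status()
    data = resp.json().get("data") or {}
    return data.get("teams", {}).get("nodes", [])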
+    async def _get_custom_view(self, view_id: str) -> dict[str, Any] | None:
+        """Get a Linear custom view by ID to check if it exists.
+
         Args:
-
+        ----
+            view_id: View identifier (slug-uuid format)
+
+        Returns:
+        -------
+            View dict with fields (id, name, description, issues) or None if not found
+
         """
+        logging.debug(f"[VIEW DEBUG] _get_custom_view called with view_id: {view_id}")
+
+        if not view_id:
+            logging.debug("[VIEW DEBUG] view_id is empty, returning None")
+            return None
+
         try:
+            logging.debug(
+                f"[VIEW DEBUG] Executing GET_CUSTOM_VIEW_QUERY for view_id: {view_id}"
+            )
             result = await self.client.execute_query(
-
-                {"teamId": team_id}
+                GET_CUSTOM_VIEW_QUERY, {"viewId": view_id, "first": 10}
             )
-
-
-
-
-
-
-
-
-
-
+            logging.debug(f"[VIEW DEBUG] Query result: {result}")
+
+            if result.get("customView"):
+                logging.debug(
+                    f"[VIEW DEBUG] customView found in result: {result.get('customView')}"
+                )
+                return result["customView"]
+
+            logging.debug(
+                f"[VIEW DEBUG] No customView in result. Checking pattern: has_hyphen={'-' in view_id}, length={len(view_id)}"
+            )
+
+            # API query failed but check if this looks like a view identifier
+            # View IDs from URLs have format: slug-uuid (e.g., "mcp-skills-issues-0d0359fabcf9")
+            # If it has hyphens and is longer than 12 chars, it's likely a view URL identifier
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                # We can't fetch the actual name, so use generic "Linear View"
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
+
+            logging.debug("[VIEW DEBUG] Pattern did not match, returning None")
+            return None
+
         except Exception as e:
-
+            logging.debug(
+                f"[VIEW DEBUG] Exception caught: {type(e).__name__}: {str(e)}"
+            )
+            # Linear returns error if view not found
+            # Check if this looks like a view identifier to provide helpful error
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Exception handler: Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
+            logging.debug(
+                "[VIEW DEBUG] Exception handler: Pattern did not match, returning None"
+            )
+            return None
 
-    def
-        """Get
-
-
-
-        """
-        if not self._workflow_states:
-            # Return type-based mapping if states not loaded
-            return {
-                TicketState.OPEN: "unstarted",
-                TicketState.IN_PROGRESS: "started",
-                TicketState.READY: "unstarted",
-                TicketState.TESTED: "started",
-                TicketState.DONE: "completed",
-                TicketState.CLOSED: "canceled",
-                TicketState.WAITING: "unstarted",
-                TicketState.BLOCKED: "unstarted",
-            }
-
-        # Return ID-based mapping using cached workflow states
-        mapping = {}
-        for universal_state, linear_type in LinearStateMapping.TO_LINEAR.items():
-            if linear_type in self._workflow_states:
-                mapping[universal_state] = self._workflow_states[linear_type]["id"]
-            else:
-                # Fallback to type name
-                mapping[universal_state] = linear_type
-
-        return mapping
+    async def get_project(self, project_id: str) -> dict[str, Any] | None:
+        """Get a Linear project by ID using direct query.
+
+        This method uses Linear's direct project(id:) GraphQL query for efficient lookups.
+        Supports UUID, slugId, or short ID formats.
 
-    async def _get_user_id(self, user_identifier: str) -> Optional[str]:
-        """Get Linear user ID from email or display name.
-
         Args:
-
-
+        ----
+            project_id: Project UUID, slugId, or short ID
+
         Returns:
-
+        -------
+            Project dict with fields (id, name, description, state, etc.) or None if not found
+
+        Examples:
+        --------
+            - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (UUID)
+            - "crm-smart-monitoring-system-f59a41a96c52" (slugId)
+            - "6cf55cfcfad4" (short ID - 12 hex chars)
+
         """
-
-
-        if user:
-            return user["id"]
-
-        # If not found by email, could implement search by display name
-        # For now, assume the identifier is already a user ID
-        return user_identifier if user_identifier else None
+        if not project_id:
+            return None
 
-
+        # Direct query using Linear's project(id:) endpoint
+        query = """
+            query GetProject($id: String!) {
+                project(id: $id) {
+                    id
+                    name
+                    description
+                    state
+                    slugId
+                    createdAt
+                    updatedAt
+                    url
+                    icon
+                    color
+                    targetDate
+                    startedAt
+                    completedAt
+                    teams {
+                        nodes {
+                            id
+                            name
+                            key
+                            description
+                        }
+                    }
+                }
+            }
+        """
 
-
-
+        try:
+            result = await self.client.execute_query(query, {"id": project_id})
+
+            if result.get("project"):
+                return result["project"]
+
+            # No match found
+            return None
+
+        except Exception:
+            # Linear returns error if project not found - return None instead of raising
+            return None
+
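All three identifier formats from the docstring go through the same project(id:) query; illustrative calls only (the IDs are the docstring's examples, not real projects):

async def demo(adapter) -> None:
    by_uuid = await adapter.get_project("a1b2c3d4-e5f6-7890-abcd-ef1234567890")
    by_slug_id = await adapter.get_project("crm-smart-monitoring-system-f59a41a96c52")
    by_short_id = await adapter.get_project("6cf55cfcfad4")
    # Each returns the project dict, or None if Linear reports no match.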
+    async def get_epic(self, epic_id: str, include_issues: bool = True) -> Epic | None:
+        """Get Linear project as Epic with optional issue loading.
+
+        This is the preferred method for reading projects/epics as it provides
+        explicit control over whether to load child issues.
 
         Args:
-
+        ----
+            epic_id: Project UUID, slugId, or short ID
+            include_issues: Whether to fetch and populate child_issues (default True)
 
         Returns:
-
+        -------
+            Epic object with child_issues populated if include_issues=True,
+            or None if project not found
 
         Raises:
-
+        ------
+            ValueError: If credentials invalid
+
+        Example:
+        -------
+            # Get project with issues
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895")
+
+            # Get project metadata only (faster)
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895", include_issues=False)
+
         """
-        # Validate credentials
+        # Validate credentials
         is_valid, error_message = self.validate_credentials()
         if not is_valid:
             raise ValueError(error_message)
 
-        #
-        await self.
+        # Fetch project data
+        project_data = await self.get_project(epic_id)
+        if not project_data:
+            return None
 
-        #
-
-        return await self._create_epic(ticket)
+        # Map to Epic
+        epic = map_linear_project_to_epic(project_data)
 
-        #
-
+        # Optionally fetch and populate child issues
+        if include_issues:
+            issues = await self._get_project_issues(epic_id)
+            epic.child_issues = [issue.id for issue in issues if issue.id is not None]
 
-
-
+        return epic
+
+    def _validate_linear_uuid(self, uuid_value: str, field_name: str = "UUID") -> bool:
+        """Validate Linear UUID format (36 chars, 8-4-4-4-12 pattern).
+
+        Linear UUIDs follow standard UUID v4 format:
+        - Total length: 36 characters
+        - Pattern: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        - Contains exactly 4 hyphens at positions 8, 13, 18, 23
 
         Args:
-
+        ----
+            uuid_value: UUID string to validate
+            field_name: Name of field for error messages (default: "UUID")
 
         Returns:
-
+        -------
+            True if valid UUID format, False otherwise
+
+        Examples:
+        --------
+            >>> _validate_linear_uuid("12345678-1234-1234-1234-123456789012", "projectId")
+            True
+            >>> _validate_linear_uuid("invalid-uuid", "projectId")
+            False
         """
-
+        logger = logging.getLogger(__name__)
 
-
-
+        if not isinstance(uuid_value, str):
+            logger.warning(f"{field_name} is not a string: {type(uuid_value).__name__}")
+            return False
 
-
-
-
-
-
+        if len(uuid_value) != 36:
+            logger.warning(
+                f"{field_name} has invalid length {len(uuid_value)}, expected 36 characters"
+            )
+            return False
+
+        if uuid_value.count("-") != 4:
+            logger.warning(
+                f"{field_name} has invalid format: {uuid_value}. "
+                f"Expected xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx pattern"
+            )
+            return False
+
+        return True
+
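Note the check above is intentionally shallow (length 36 plus exactly four hyphens) rather than a full RFC 4122 parse. The equivalent logic as a standalone predicate, with the function name being ours:

def looks_like_linear_uuid(value: object) -> bool:
    # Mirrors _validate_linear_uuid's shape check, without the logging.
    return isinstance(value, str) and len(value) == 36 and value.count("-") == 4

assert looks_like_linear_uuid("12345678-1234-1234-1234-123456789012")
assert not looks_like_linear_uuid("invalid-uuid")
assert not looks_like_linear_uuid("6cf55cfcfad4")  # short ID, not a full UUID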
+    async def _resolve_project_id(self, project_identifier: str) -> str | None:
+        """Resolve project identifier (slug, name, short ID, or URL) to full UUID.
+
+        Args:
+        ----
+            project_identifier: Project slug, name, short ID, or URL
+
+        Returns:
+        -------
+            Full Linear project UUID, or None if not found
+
+        Raises:
+        ------
+            ValueError: If project lookup fails
+
+        Examples:
+        --------
+            - "crm-smart-monitoring-system" (slug)
+            - "CRM Smart Monitoring System" (name)
+            - "f59a41a96c52" (short ID from URL)
+            - "https://linear.app/travel-bta/project/crm-smart-monitoring-system-f59a41a96c52/overview" (full URL)
+
+        """
+        if not project_identifier:
+            return None
 
+        # Use tested URL parser to normalize the identifier
+        # This correctly extracts project IDs from URLs and handles:
+        # - Full URLs: https://linear.app/team/project/slug-id/overview
+        # - Slug-ID format: slug-id
+        # - Plain identifiers: id
         try:
-
-
-                {"input": issue_input}
+            project_identifier = normalize_project_id(
+                project_identifier, adapter_type="linear"
             )
+        except URLParserError as e:
+            logging.getLogger(__name__).warning(
+                f"Failed to parse project identifier: {e}"
+            )
+            # Continue with original identifier - may still work if it's a name
 
-
-
+        # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
+        # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        if len(project_identifier) == 36 and project_identifier.count("-") == 4:
+            return project_identifier
 
-
-
+        # OPTIMIZATION: Try direct query first if it looks like a UUID, slugId, or short ID
+        # This is more efficient than listing all projects
+        should_try_direct_query = False
+
+        # Check if it looks like a short ID (exactly 12 hex characters)
+        if len(project_identifier) == 12 and all(
+            c in "0123456789abcdefABCDEF" for c in project_identifier
+        ):
+            should_try_direct_query = True
+
+        # Check if it looks like a slugId format (contains dashes and ends with 12 hex chars)
+        if "-" in project_identifier:
+            parts = project_identifier.rsplit("-", 1)
+            if len(parts) > 1:
+                potential_short_id = parts[1]
+                if len(potential_short_id) == 12 and all(
+                    c in "0123456789abcdefABCDEF" for c in potential_short_id
+                ):
+                    should_try_direct_query = True
+
+        # Try direct query first if identifier format suggests it might work
+        if should_try_direct_query:
+            try:
+                project = await self.get_project(project_identifier)
+                if project:
+                    return project["id"]
+            except Exception as e:
+                # Direct query failed - fall through to list-based search
+                logging.getLogger(__name__).debug(
+                    f"Direct project query failed for '{project_identifier}': {e}. "
+                    f"Falling back to listing all projects."
+                )
+
+        # FALLBACK: Query all projects with pagination support
+        # This is less efficient but handles name-based lookups and edge cases
+        query = """
+            query GetProjects($first: Int!, $after: String) {
+                projects(first: $first, after: $after) {
+                    nodes {
+                        id
+                        name
+                        slugId
+                    }
+                    pageInfo {
+                        hasNextPage
+                        endCursor
+                    }
+                }
+            }
+        """
+
+        try:
+            # Fetch all projects across multiple pages
+            all_projects = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page:
+                variables = {"first": 100}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(query, variables)
+                projects_data = result.get("projects", {})
+                page_projects = projects_data.get("nodes", [])
+                page_info = projects_data.get("pageInfo", {})
+
+                all_projects.extend(page_projects)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
+
+            # Search for match by slug, slugId, name (case-insensitive)
+            project_lower = project_identifier.lower()
+            for project in all_projects:
+                # Check if identifier matches slug pattern (extracted from slugId)
+                slug_id = project.get("slugId", "")
+                if slug_id:
+                    # slugId format: "crm-smart-monitoring-system-f59a41a96c52"
+                    # Linear short IDs are always exactly 12 hexadecimal characters
+                    # Extract both the slug part and short ID
+                    if "-" in slug_id:
+                        parts = slug_id.rsplit("-", 1)
+                        potential_short_id = parts[1] if len(parts) > 1 else ""
+
+                        # Validate it's exactly 12 hex characters
+                        if len(potential_short_id) == 12 and all(
+                            c in "0123456789abcdefABCDEF" for c in potential_short_id
+                        ):
+                            slug_part = parts[0]
+                            short_id = potential_short_id
+                        else:
+                            # Fallback: treat entire slugId as slug if last part isn't valid
+                            slug_part = slug_id
+                            short_id = ""
+
+                        # Match full slugId, slug part, or short ID
+                        if (
+                            slug_id.lower() == project_lower
+                            or slug_part.lower() == project_lower
+                            or short_id.lower() == project_lower
+                        ):
+                            project_uuid = project["id"]
+                            # Validate UUID format before returning
+                            if not self._validate_linear_uuid(
+                                project_uuid, "projectId"
+                            ):
+                                logging.getLogger(__name__).error(
+                                    f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                                    f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                                    f"This indicates a data inconsistency in Linear API response."
+                                )
+                                return None
+                            return project_uuid
+
+                # Also check exact name match (case-insensitive)
+                if project["name"].lower() == project_lower:
+                    project_uuid = project["id"]
+                    # Validate UUID format before returning
+                    if not self._validate_linear_uuid(project_uuid, "projectId"):
+                        logging.getLogger(__name__).error(
+                            f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                            f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                            f"This indicates a data inconsistency in Linear API response."
+                        )
+                        return None
+                    return project_uuid
+
+            # No match found
+            return None
 
         except Exception as e:
-            raise ValueError(
+            raise ValueError(
+                f"Failed to resolve project '{project_identifier}': {e}"
+            ) from e
 
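The fallback path above uses the same cursor pagination pattern that recurs in the label loaders later in this file. A generic sketch of that pattern, with execute_query standing in for the adapter's GraphQL client:

from collections.abc import Awaitable, Callable
from typing import Any

async def fetch_all_nodes(
    execute_query: Callable[[str, dict], Awaitable[dict]],
    query: str,
    connection_key: str,
    page_size: int = 100,
) -> list[dict[str, Any]]:
    # Follow Relay-style pageInfo cursors until hasNextPage is False.
    nodes: list[dict[str, Any]] = []
    after: str | None = None
    while True:
        variables: dict[str, Any] = {"first": page_size}
        if after:
            variables["after"] = after
        result = await execute_query(query, variables)
        connection = result.get(connection_key, {})
        nodes.extend(connection.get("nodes", []))
        page_info = connection.get("pageInfo", {})
        if not page_info.get("hasNextPage", False):
            return nodes
        after = page_info.get("endCursor")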
-    async def
-
+    async def _validate_project_team_association(
+        self, project_id: str, team_id: str
+    ) -> tuple[bool, list[str]]:
+        """Check if team is associated with project.
 
         Args:
-
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID
 
         Returns:
-
+        -------
+            Tuple of (is_associated, list_of_project_team_ids)
+
         """
-
+        project = await self.get_project(project_id)
+        if not project:
+            return False, []
 
-
-
-        "
-
+        # Extract team IDs from project's teams
+        project_team_ids = [
+            team["id"] for team in project.get("teams", {}).get("nodes", [])
+        ]
 
-
-        project_input["description"] = epic.description
+        return team_id in project_team_ids, project_team_ids
 
-
-
-
-
+    async def _ensure_team_in_project(self, project_id: str, team_id: str) -> bool:
+        """Add team to project if not already associated.
+
+        Args:
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID to add
+
+        Returns:
+        -------
+            True if successful, False otherwise
+
+        """
+        # First check current association
+        is_associated, existing_team_ids = (
+            await self._validate_project_team_association(project_id, team_id)
+        )
+
+        if is_associated:
+            return True  # Already associated, nothing to do
+
+        # Add team to project by updating project's teamIds
+        update_query = """
+            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+                projectUpdate(id: $id, input: $input) {
                     success
                     project {
                         id
-                        name
-                        description
-                        state
-                        createdAt
-                        updatedAt
-                        url
-                        icon
-                        color
-                        targetDate
-                        startedAt
-                        completedAt
                         teams {
                             nodes {
                                 id
                                 name
-                                key
-                                description
                             }
                         }
                     }
@@ -385,156 +838,3037 @@ class LinearAdapter(BaseAdapter[Task]):
             }
         """
 
+        # Include existing teams + new team
+        all_team_ids = existing_team_ids + [team_id]
+
         try:
             result = await self.client.execute_mutation(
-
-                {"input": project_input}
+                update_query, {"id": project_id, "input": {"teamIds": all_team_ids}}
             )
+            success = result.get("projectUpdate", {}).get("success", False)
 
-            if
-
+            if success:
+                logging.getLogger(__name__).info(
+                    f"Successfully added team {team_id} to project {project_id}"
+                )
+            else:
+                logging.getLogger(__name__).warning(
+                    f"Failed to add team {team_id} to project {project_id}"
+                )
 
-
+            return success
+        except Exception as e:
+            logging.getLogger(__name__).error(
+                f"Error adding team {team_id} to project {project_id}: {e}"
+            )
+            return False
+
+    async def _get_project_issues(
+        self, project_id: str, limit: int = 100
+    ) -> list[Task]:
+        """Fetch all issues belonging to a Linear project.
+
+        Uses existing build_issue_filter() and LIST_ISSUES_QUERY infrastructure
+        to fetch issues filtered by project_id.
+
+        Args:
+        ----
+            project_id: Project UUID, slugId, or short ID
+            limit: Maximum issues to return (default 100, max 250)
+
+        Returns:
+        -------
+            List of Task objects representing project's issues
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or query fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Build filter for issues belonging to this project
+        issue_filter = build_issue_filter(project_id=project_id)
+
+        variables = {
+            "filter": issue_filter,
+            "first": min(limit, 250),  # Linear API max per page
+        }
+
+        try:
+            result = await self.client.execute_query(LIST_ISSUES_QUERY, variables)
+            issues = result.get("issues", {}).get("nodes", [])
+
+            # Map Linear issues to Task objects
+            return [map_linear_issue_to_task(issue) for issue in issues]
+
+        except Exception as e:
+            # Log but don't fail - return empty list if issues can't be fetched
+            logger.warning(f"Failed to fetch project issues for {project_id}: {e}")
+            return []
+
+    async def _resolve_issue_id(self, issue_identifier: str) -> str | None:
+        """Resolve issue identifier (like "ENG-842") to full UUID.
+
+        Args:
+        ----
+            issue_identifier: Issue identifier (e.g., "ENG-842") or UUID
+
+        Returns:
+        -------
+            Full Linear issue UUID, or None if not found
+
+        Raises:
+        ------
+            ValueError: If issue lookup fails
+
+        Examples:
+        --------
+            - "ENG-842" (issue identifier)
+            - "BTA-123" (issue identifier)
+            - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (already a UUID)
+
+        """
+        if not issue_identifier:
+            return None
+
+        # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
+        # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        if len(issue_identifier) == 36 and issue_identifier.count("-") == 4:
+            return issue_identifier
+
+        # Query issue by identifier to get its UUID
+        query = """
+            query GetIssueId($identifier: String!) {
+                issue(id: $identifier) {
+                    id
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(
+                query, {"identifier": issue_identifier}
+            )
+
+            if result.get("issue"):
+                return result["issue"]["id"]
+
+            # No match found
+            return None
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to resolve issue '{issue_identifier}': {e}"
+            ) from e
+
+    async def _load_workflow_states(self, team_id: str) -> None:
+        """Load and cache workflow states for the team with semantic name matching.
+
+        Implements two-level mapping strategy to handle Linear workflows with
+        multiple states of the same type (e.g., "Todo", "Backlog", "Ready" all
+        being "unstarted"):
+
+        1. Semantic name matching: Match state names to universal states using
+           predefined mappings (flexible, respects custom workflows)
+        2. State type fallback: Use first state of matching type for unmapped
+           universal states (backward compatible)
+
+        This fixes issue 1M-552 where transitions to READY/TESTED/WAITING states
+        failed with "Discrepancy between issue state and state type" errors.
+
+        Args:
+        ----
+            team_id: Linear team ID
+
+        """
+        logger = logging.getLogger(__name__)
+        try:
+            result = await self.client.execute_query(
+                WORKFLOW_STATES_QUERY, {"teamId": team_id}
+            )
+
+            states = result["team"]["states"]["nodes"]
+
+            # Build auxiliary mappings for efficient lookup
+            state_by_name: dict[str, tuple[str, str]] = {}  # name → (state_id, type)
+            state_by_type: dict[str, str] = {}  # type → state_id (first occurrence)
+
+            # Sort states by position to ensure consistent selection
+            sorted_states = sorted(states, key=lambda s: s["position"])
+
+            for state in sorted_states:
+                state_id = state["id"]
+                state_name = state["name"].lower()
+                state_type = state["type"].lower()
+
+                # Store by name for semantic matching (first occurrence wins)
+                if state_name not in state_by_name:
+                    state_by_name[state_name] = (state_id, state_type)
+
+                # Store by type for fallback (keep first occurrence by position)
+                if state_type not in state_by_type:
+                    state_by_type[state_type] = state_id
+
+            # Build final state map with semantic matching
+            workflow_states = {}
+
+            for universal_state in TicketState:
+                state_id = None
+                matched_strategy = None
+
+                # Strategy 1: Try semantic name matching
+                if universal_state in LinearStateMapping.SEMANTIC_NAMES:
+                    for semantic_name in LinearStateMapping.SEMANTIC_NAMES[
+                        universal_state
+                    ]:
+                        if semantic_name in state_by_name:
+                            state_id = state_by_name[semantic_name][0]
+                            matched_strategy = f"name:{semantic_name}"
+                            break
+
+                # Strategy 2: Fallback to type mapping
+                if not state_id:
+                    linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
+                    if linear_type:
+                        state_id = state_by_type.get(linear_type)
+                        if state_id:
+                            matched_strategy = f"type:{linear_type}"
+
+                if state_id:
+                    workflow_states[universal_state.value] = state_id
+                    logger.debug(
+                        f"Mapped {universal_state.value} → {state_id} "
+                        f"(strategy: {matched_strategy})"
+                    )
+
+            self._workflow_states = workflow_states
+
+            # Log warning if multiple states of same type detected
+            type_counts: dict[str, int] = {}
+            for state in states:
+                state_type = state["type"].lower()
+                type_counts[state_type] = type_counts.get(state_type, 0) + 1
+
+            multi_state_types = {
+                type_: count for type_, count in type_counts.items() if count > 1
+            }
+            if multi_state_types:
+                logger.info(
+                    f"Team {team_id} has multiple states per type: {multi_state_types}. "
+                    "Using semantic name matching for state resolution."
+                )
+
+        except Exception as e:
+            raise ValueError(f"Failed to load workflow states: {e}") from e
+
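The two-level resolution above (semantic name first, state type second) can be seen on a toy workflow. The SEMANTIC_NAMES/TO_LINEAR values below are invented stand-ins for the real LinearStateMapping tables in types.py:

SEMANTIC_NAMES = {"ready": ["ready", "todo"], "in_progress": ["in progress"]}
TO_LINEAR = {"ready": "unstarted", "in_progress": "started"}

states = [  # as returned by WORKFLOW_STATES_QUERY, already sorted by position
    {"id": "s1", "name": "Backlog", "type": "unstarted"},
    {"id": "s2", "name": "Ready", "type": "unstarted"},
    {"id": "s3", "name": "In Progress", "type": "started"},
]
state_by_name = {s["name"].lower(): s["id"] for s in states}
state_by_type: dict[str, str] = {}
for s in states:
    state_by_type.setdefault(s["type"], s["id"])

def resolve(universal: str) -> str | None:
    # Strategy 1: semantic name match respects custom workflows.
    for name in SEMANTIC_NAMES.get(universal, []):
        if name in state_by_name:
            return state_by_name[name]
    # Strategy 2: fall back to the first state of the mapped type.
    return state_by_type.get(TO_LINEAR.get(universal, ""))

assert resolve("ready") == "s2"  # name match wins over first-of-type "Backlog"
assert resolve("in_progress") == "s3"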
+    async def _load_team_labels(self, team_id: str) -> None:
+        """Load and cache labels for the team with retry logic and pagination.
+
+        Fetches ALL labels for the team using cursor-based pagination.
+        Handles teams with >250 labels (Linear's default page size).
+
+        Args:
+        ----
+            team_id: Linear team ID
+
+        """
+        logger = logging.getLogger(__name__)
+
+        query = """
+            query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
+                team(id: $teamId) {
+                    labels(first: $first, after: $after) {
+                        nodes {
+                            id
+                            name
+                            color
+                            description
+                        }
+                        pageInfo {
+                            hasNextPage
+                            endCursor
+                        }
+                    }
+                }
+            }
+        """
+
+        max_retries = 3
+        for attempt in range(max_retries):
+            try:
+                # Fetch all labels with pagination
+                all_labels: list[dict] = []
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    all_labels.extend(page_labels)
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                if page_count >= max_pages and has_next_page:
+                    logger.warning(
+                        f"Reached max page limit ({max_pages}) for team {team_id}. "
+                        f"Loaded {len(all_labels)} labels, but more may exist."
+                    )
+
+                # Store in TTL-based cache
+                cache_key = f"linear_labels:{team_id}"
+                await self._labels_cache.set(cache_key, all_labels)
+                logger.info(
+                    f"Loaded {len(all_labels)} labels for team {team_id} ({page_count} page(s))"
+                )
+                return  # Success
+
+            except Exception as e:
+                if attempt < max_retries - 1:
+                    wait_time = 2**attempt
+                    logger.warning(
+                        f"Failed to load labels (attempt {attempt + 1}/{max_retries}): {e}. "
+                        f"Retrying in {wait_time}s..."
+                    )
+                    await asyncio.sleep(wait_time)
+                else:
+                    logger.error(
+                        f"Failed to load team labels after {max_retries} attempts: {e}",
+                        exc_info=True,
+                    )
+                    # Store empty list in cache on failure
+                    cache_key = f"linear_labels:{team_id}"
+                    await self._labels_cache.set(cache_key, [])
+
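
A minimal sketch of the cursor walk `_load_team_labels` performs, assuming only a client whose `execute_query` returns the `team.labels` connection requested above; the `fetch_all_labels` helper and its parameters are illustrative, not part of the package:

    # Illustrative cursor-based pagination over a GraphQL connection (hypothetical helper).
    async def fetch_all_labels(client, query, team_id, page_size=250, max_pages=10):
        nodes, cursor = [], None
        for _ in range(max_pages):  # hard cap mirrors the max_pages safety limit above
            variables = {"teamId": team_id, "first": page_size}
            if cursor:
                variables["after"] = cursor
            connection = (await client.execute_query(query, variables))["team"]["labels"]
            nodes.extend(connection["nodes"])
            if not connection["pageInfo"]["hasNextPage"]:
                break
            cursor = connection["pageInfo"]["endCursor"]  # resume after the last node seen
        return nodes
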
+    async def _find_label_by_name(
+        self, name: str, team_id: str, max_retries: int = 3
+    ) -> dict | None:
+        """Find a label by name using Linear API (server-side check) with retry logic and pagination.
+
+        Handles cache staleness by checking Linear's server-side state.
+        This method is used when cache lookup misses to prevent duplicate
+        label creation attempts.
+
+        Implements retry logic with exponential backoff to handle transient
+        network failures and distinguish between "label not found" (None) and
+        "check failed" (exception).
+
+        Uses cursor-based pagination with early exit optimization to handle
+        teams with >250 labels efficiently. Stops searching as soon as the
+        label is found.
+
+        Args:
+        ----
+            name: Label name to search for (case-insensitive)
+            team_id: Linear team ID
+            max_retries: Maximum retry attempts for transient failures (default: 3)
+
+        Returns:
+        -------
+            dict: Label data if found (with id, name, color, description)
+            None: Label definitively doesn't exist (checked successfully)
+
+        Raises:
+        ------
+            Exception: Unable to check label existence after retries exhausted
+                (network/API failure). Caller must handle to prevent
+                duplicate label creation.
+
+        Related:
+        -------
+            1M-443: Fix duplicate label error when setting existing labels
+            1M-443 hotfix: Add retry logic to prevent ambiguous error handling
+
+        """
+        logger = logging.getLogger(__name__)
+
+        query = """
+            query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
+                team(id: $teamId) {
+                    labels(first: $first, after: $after) {
+                        nodes {
+                            id
+                            name
+                            color
+                            description
+                        }
+                        pageInfo {
+                            hasNextPage
+                            endCursor
+                        }
+                    }
+                }
+            }
+        """
+
+        for attempt in range(max_retries):
+            try:
+                # Search with pagination and early exit
+                name_lower = name.lower()
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+                total_checked = 0
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    total_checked += len(page_labels)
+
+                    # Case-insensitive search in current page
+                    for label in page_labels:
+                        if label["name"].lower() == name_lower:
+                            logger.debug(
+                                f"Found label '{name}' via server-side search "
+                                f"(ID: {label['id']}, checked {total_checked} labels)"
+                            )
+                            return label
+
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                if page_count >= max_pages and has_next_page:
+                    logger.warning(
+                        f"Reached max page limit ({max_pages}) searching for label '{name}'. "
+                        f"Checked {total_checked} labels, but more exist."
+                    )
+
+                # Label definitively doesn't exist (successful check)
+                logger.debug(f"Label '{name}' not found in {total_checked} team labels")
+                return None
+
+            except Exception as e:
+                if attempt < max_retries - 1:
+                    # Transient failure, retry with exponential backoff
+                    wait_time = 2**attempt
+                    await asyncio.sleep(wait_time)
+                    logger.debug(
+                        f"Retry {attempt + 1}/{max_retries} for label '{name}' search: {e}"
+                    )
+                    continue
+                else:
+                    # All retries exhausted, propagate exception
+                    # CRITICAL: Caller must handle to prevent duplicate creation
+                    logger.error(
+                        f"Failed to check label '{name}' after {max_retries} attempts: {e}"
+                    )
+                    raise
+
+        # This should never be reached (all paths return/raise in loop)
+        return None
+
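
A sketch of the contract the docstring above describes, assuming an initialized adapter and a resolved `team_id`: `None` means the label is definitively absent, an exception means its existence could not be verified, and only the first case makes creation safe:

    try:
        label = await adapter._find_label_by_name("bug", team_id)
    except Exception:
        raise  # existence unknown after retries - creating now could duplicate
    if label is None:
        label_id = await adapter._create_label("bug", team_id)  # definitively absent
    else:
        label_id = label["id"]  # reuse the existing label
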
+    async def _create_label(
+        self, name: str, team_id: str, color: str = "#0366d6"
+    ) -> str:
+        """Create a new label in Linear.
+
+        Implements race condition recovery: if creation fails due to duplicate,
+        retry lookup from server (Tier 2) to get the existing label ID.
+
+        Related: 1M-398 - Label duplicate error handling
+
+        Args:
+        ----
+            name: Label name
+            team_id: Linear team ID
+            color: Label color (hex format, default: blue)
+
+        Returns:
+        -------
+            str: Label ID (either newly created or existing after recovery)
+
+        Raises:
+        ------
+            ValueError: If label creation fails and recovery lookup also fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        label_input = {
+            "name": name,
+            "teamId": team_id,
+            "color": color,
+        }
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_LABEL_MUTATION, {"input": label_input}
+            )
+
+            if not result["issueLabelCreate"]["success"]:
+                raise ValueError(f"Failed to create label '{name}'")
+
+            created_label = result["issueLabelCreate"]["issueLabel"]
+            label_id = created_label["id"]
+
+            # Invalidate cache to force refresh on next access
+            if self._labels_cache is not None:
+                await self._labels_cache.clear()
+
+            logger.info(f"Created new label '{name}' with ID: {label_id}")
+            return label_id
+
+        except Exception as e:
+            """
+            Race condition recovery: Another process may have created this label
+            between our Tier 2 lookup and creation attempt.
+
+            Graceful recovery:
+            1. Check if error is duplicate label error
+            2. Retry Tier 2 lookup (query server)
+            3. Return existing label ID if found
+            4. Raise error if recovery fails
+            """
+            error_str = str(e).lower()
+
+            # Check if this is a duplicate label error
+            if "duplicate" in error_str and "label" in error_str:
+                logger.debug(
+                    f"Duplicate label detected for '{name}', attempting recovery lookup"
+                )
+
+                # Retry Tier 2 with backoff: API eventual consistency requires delay
+                # Linear API has 100-500ms propagation delay between write and read
+                max_recovery_attempts = 5
+                backoff_delays = [0.1, 0.2, 0.5, 1.0, 1.5]  # Total: 3.3s max
+
+                for attempt in range(max_recovery_attempts):
+                    try:
+                        if attempt > 0:
+                            # Wait before retry (skip delay on first attempt)
+                            delay = backoff_delays[
+                                min(attempt - 1, len(backoff_delays) - 1)
+                            ]
+                            logger.debug(
+                                f"Label '{name}' duplicate detected. "
+                                f"Retrying retrieval (attempt {attempt + 1}/{max_recovery_attempts}) "
+                                f"after {delay}s delay for API propagation..."
+                            )
+                            await asyncio.sleep(delay)
+
+                        # Query server for existing label
+                        server_label = await self._find_label_by_name(name, team_id)
+
+                        if server_label:
+                            label_id = server_label["id"]
+
+                            # Invalidate cache to force refresh on next access
+                            if self._labels_cache is not None:
+                                await self._labels_cache.clear()
+
+                            logger.info(
+                                f"Successfully recovered existing label '{name}' (ID: {label_id}) "
+                                f"after {attempt + 1} attempt(s)"
+                            )
+                            return label_id
+
+                        # Label still not found, log and continue to next retry
+                        logger.debug(
+                            f"Label '{name}' not found in recovery attempt {attempt + 1}/{max_recovery_attempts}"
+                        )
+
+                    except Exception as lookup_error:
+                        logger.warning(
+                            f"Recovery lookup failed on attempt {attempt + 1}/{max_recovery_attempts}: {lookup_error}"
+                        )
+
+                        # If this is the last attempt, raise with context
+                        if attempt == max_recovery_attempts - 1:
+                            raise ValueError(
+                                f"Failed to recover label '{name}' after {max_recovery_attempts} attempts. "
+                                f"Last error: {lookup_error}. This may indicate:\n"
+                                f" 1. Network connectivity issues\n"
+                                f" 2. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                                f" 3. Label exists beyond first 250 labels in team\n"
+                                f" 4. Permissions issue preventing label query\n"
+                                f"Please retry the operation or check Linear workspace status."
+                            ) from lookup_error
+
+                        # Not the last attempt, continue to next retry
+                        continue
+
+                # If we get here, all recovery attempts failed (label never found, no exceptions)
+                raise ValueError(
+                    f"Label '{name}' already exists but could not retrieve ID after "
+                    f"{max_recovery_attempts} attempts. The label query succeeded but returned no results.\n"
+                    f"This may indicate:\n"
+                    f" 1. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                    f" 2. Label exists beyond first 250 labels in team\n"
+                    f" 3. Permissions issue preventing label query\n"
+                    f" 4. Team ID mismatch\n"
+                    f"Please retry the operation or check Linear workspace permissions."
+                ) from e
+
+            # Not a duplicate error - re-raise original exception
+            logger.error(f"Failed to create label '{name}': {e}")
+            raise ValueError(f"Failed to create label '{name}': {e}") from e
+
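
A worked example of the recovery schedule above. Because five attempts are made and the first runs immediately, only the first four `backoff_delays` entries are ever consumed, so the worst-case total wait is 1.8s rather than the 3.3s sum quoted in the inline comment:

    backoff_delays = [0.1, 0.2, 0.5, 1.0, 1.5]
    max_recovery_attempts = 5
    waits = [
        backoff_delays[min(attempt - 1, len(backoff_delays) - 1)]
        for attempt in range(1, max_recovery_attempts)  # attempt 0 runs with no delay
    ]
    print(waits)       # [0.1, 0.2, 0.5, 1.0]
    print(sum(waits))  # 1.8 - the 1.5s entry is reached only with more attempts
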
+    async def _ensure_labels_exist(self, label_names: list[str]) -> list[str]:
+        """Ensure labels exist, creating them if necessary.
+
+        This method implements a three-tier label resolution flow to prevent
+        duplicate label creation errors:
+
+        1. **Tier 1 (Cache)**: Check local cache (fast, 0 API calls)
+        2. **Tier 2 (Server)**: Query Linear API for label (handles staleness, +1 API call)
+        3. **Tier 3 (Create)**: Create new label only if truly doesn't exist
+
+        The three-tier approach solves cache staleness issues where labels exist
+        in Linear but not in local cache, preventing "label already exists" errors.
+
+        Behavior (1M-396):
+        - Fail-fast: If any label creation fails, the exception is propagated
+        - All-or-nothing: Partial label updates are not allowed
+        - Clear errors: Callers receive actionable error messages
+
+        Performance:
+        - Cached labels: 0 additional API calls (Tier 1 hit)
+        - New labels: +1 API call for existence check (Tier 2) + 1 for creation (Tier 3)
+        - Trade-off: Accepts +1 API call to prevent duplicate errors
+
+        Args:
+        ----
+            label_names: List of label names (strings)
+
+        Returns:
+        -------
+            List of Linear label IDs (UUIDs)
+
+        Raises:
+        ------
+            ValueError: If any label creation fails
+
+        Related:
+        -------
+            1M-443: Fix duplicate label error when setting existing labels
+            1M-396: Fail-fast label creation behavior
+
+        """
+        logger = logging.getLogger(__name__)
+
+        if not label_names:
+            return []
+
+        # Get team ID for label operations
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before loading labels
+        if not team_id:
+            raise ValueError(
+                "Cannot resolve Linear labels without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Check cache for labels
+        cache_key = f"linear_labels:{team_id}"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        # Load labels if not cached
+        if cached_labels is None:
+            await self._load_team_labels(team_id)
+            cached_labels = await self._labels_cache.get(cache_key)
+
+        if not cached_labels:
+            logger.error(
+                "Label cache is empty after load attempt. Tags will be skipped."
+            )
+            return []
+
+        # Create name -> ID mapping (case-insensitive)
+        label_map = {label["name"].lower(): label["id"] for label in cached_labels}
+
+        logger.debug(f"Available labels in team: {list(label_map.keys())}")
+
+        # Map or create each label
+        label_ids = []
+        for name in label_names:
+            name_lower = name.lower()
+
+            # Tier 1: Check cache (fast path, 0 API calls)
+            if name_lower in label_map:
+                label_id = label_map[name_lower]
+                label_ids.append(label_id)
+                logger.debug(
+                    f"[Tier 1] Resolved cached label '{name}' to ID: {label_id}"
+                )
+            else:
+                # Tier 2: Check server for label (handles cache staleness)
+                try:
+                    server_label = await self._find_label_by_name(name, team_id)
+                except Exception as e:
+                    # Server check failed after retries (1M-443 hotfix)
+                    # CRITICAL: Do NOT proceed to creation to prevent duplicates
+                    # Re-raise to signal failure to verify label existence
+                    logger.error(
+                        f"Unable to verify label '{name}' existence. "
+                        f"Cannot safely create to avoid duplicates. Error: {e}"
+                    )
+                    raise ValueError(
+                        f"Unable to verify label '{name}' existence. "
+                        f"Cannot safely create to avoid duplicates. Error: {e}"
+                    ) from e
+
+                if server_label:
+                    # Label exists on server but not in cache - invalidate cache
+                    label_id = server_label["id"]
+                    label_ids.append(label_id)
+                    label_map[name_lower] = label_id
+
+                    # Invalidate cache to force refresh on next access
+                    if self._labels_cache is not None:
+                        await self._labels_cache.clear()
+
+                    logger.info(
+                        f"[Tier 2] Found stale label '{name}' on server (ID: {label_id}), "
+                        "invalidated cache for refresh"
+                    )
+                else:
+                    # Tier 3: Label truly doesn't exist - create it
+                    # Propagate exceptions for fail-fast behavior (1M-396)
+                    new_label_id = await self._create_label(name, team_id)
+                    label_ids.append(new_label_id)
+                    # Update local map for subsequent labels in same call
+                    label_map[name_lower] = new_label_id
+                    logger.info(
+                        f"[Tier 3] Created new label '{name}' with ID: {new_label_id}"
+                    )
+
+        return label_ids
+
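
How the three tiers play out for one hypothetical call, assuming "bug" is already cached, "needs-triage" exists in Linear but not in the local cache, and "q3-launch" does not exist anywhere:

    label_ids = await adapter._ensure_labels_exist(["bug", "needs-triage", "q3-launch"])
    # "bug"          -> Tier 1: cache hit, 0 API calls
    # "needs-triage" -> Tier 2: found server-side, cache invalidated, +1 API call
    # "q3-launch"    -> Tier 3: created, +1 existence check and +1 creation call
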
+    async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
+        """Resolve label names to Linear label IDs, creating labels if needed.
+
+        This method wraps _ensure_labels_exist for backward compatibility.
+
+        Args:
+        ----
+            label_names: List of label names
+
+        Returns:
+        -------
+            List of Linear label IDs
+
+        """
+        return await self._ensure_labels_exist(label_names)
+
+    def _get_state_mapping(self) -> dict[TicketState, str]:
+        """Get mapping from universal states to Linear workflow state IDs.
+
+        Returns:
+        -------
+            Dictionary mapping TicketState to Linear state ID (UUID)
+
+        """
+        if not self._workflow_states:
+            # Return type-based mapping if states not loaded
+            return {
+                TicketState.OPEN: "unstarted",
+                TicketState.IN_PROGRESS: "started",
+                TicketState.READY: "unstarted",
+                TicketState.TESTED: "started",
+                TicketState.DONE: "completed",
+                TicketState.CLOSED: "canceled",
+                TicketState.WAITING: "unstarted",
+                TicketState.BLOCKED: "unstarted",
+            }
+
+        # Return ID-based mapping using cached workflow states
+        # _workflow_states is keyed by universal_state.value (e.g., "open")
+        # and contains state UUIDs directly
+        mapping = {}
+        for universal_state in TicketState:
+            state_uuid = self._workflow_states.get(universal_state.value)
+            if state_uuid:
+                mapping[universal_state] = state_uuid
+            else:
+                # Fallback to type name if state not found in cache
+                linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
+                if linear_type:
+                    mapping[universal_state] = linear_type
+
+        return mapping
+
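
A sketch of how the mapping is consumed when building a mutation input; before `initialize()` has loaded workflow states the values are Linear type names, afterwards they are state UUIDs (the `adapter` and `team_id` names are assumptions for the example):

    state_mapping = adapter._get_state_mapping()
    issue_input = {"teamId": team_id, "title": "Fix login timeout"}
    if TicketState.IN_PROGRESS in state_mapping:
        # A state UUID once workflow states are cached; the type name "started" otherwise.
        issue_input["stateId"] = state_mapping[TicketState.IN_PROGRESS]
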
+    async def _get_user_id(self, user_identifier: str) -> str | None:
+        """Get Linear user ID from email, display name, or user ID.
+
+        Args:
+        ----
+            user_identifier: Email, display name, or user ID
+
+        Returns:
+        -------
+            Linear user ID or None if not found
+
+        """
+        if not user_identifier:
+            return None
+
+        # Try email lookup first (most specific)
+        user = await self.client.get_user_by_email(user_identifier)
+        if user:
+            return user["id"]
+
+        # Try name search (displayName or full name)
+        users = await self.client.get_users_by_name(user_identifier)
+        if users:
+            if len(users) == 1:
+                # Exact match found
+                return users[0]["id"]
+            else:
+                # Multiple matches - try exact match
+                for u in users:
+                    if (
+                        u.get("displayName", "").lower() == user_identifier.lower()
+                        or u.get("name", "").lower() == user_identifier.lower()
+                    ):
+                        return u["id"]
+
+                # No exact match - log ambiguity and return first
+                logging.getLogger(__name__).warning(
+                    f"Multiple users match '{user_identifier}': "
+                    f"{[u.get('displayName', u.get('name')) for u in users]}. "
+                    f"Using first match: {users[0].get('displayName')}"
+                )
+                return users[0]["id"]
+
+        # Assume it's already a user ID
+        return user_identifier
+
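
The three lookups in order of specificity, with invented identifiers shown purely for illustration:

    await adapter._get_user_id("ada@example.com")  # 1. exact email lookup
    await adapter._get_user_id("Ada Lovelace")     # 2. name search; exact display-name match preferred
    await adapter._get_user_id("9f1c2b3a-...")     # 3. no match above: passed through as a user ID
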
+    # CRUD Operations
+
+    async def create(self, ticket: Epic | Task) -> Epic | Task:
+        """Create a new Linear issue or project with full field support.
+
+        Args:
+        ----
+            ticket: Epic or Task to create
+
+        Returns:
+        -------
+            Created ticket with populated ID and metadata
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or creation fails
+
+        """
+        # Validate credentials before attempting operation
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Ensure adapter is initialized
+        await self.initialize()
+
+        # Handle Epic creation (Linear Projects)
+        if isinstance(ticket, Epic):
+            return await self._create_epic(ticket)
+
+        # Handle Task creation (Linear Issues)
+        return await self._create_task(ticket)
+
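
A dispatch sketch: the single entry point routes `Epic` to `projectCreate` and `Task` to `issueCreate`. The constructor keywords here are assumptions about the core models, shown only to make the branching concrete:

    epic = await adapter.create(Epic(title="Payments revamp"))      # -> _create_epic / projectCreate
    task = await adapter.create(Task(title="Add webhook retries"))  # -> _create_task / issueCreate
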
+    async def _create_task(self, task: Task) -> Task:
+        """Create a Linear issue or sub-issue from a Task.
+
+        Creates a top-level issue when task.parent_issue is not set, or a
+        sub-issue (child of another issue) when task.parent_issue is provided.
+        In Linear terminology:
+        - Issue: Top-level work item (no parent)
+        - Sub-issue: Child work item (has parent issue)
+
+        Args:
+        ----
+            task: Task to create
+
+        Returns:
+        -------
+            Created task with Linear metadata
+
+        """
+        logger = logging.getLogger(__name__)
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before creating issue
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear issue without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Build issue input using mapper
+        issue_input = build_linear_issue_input(task, team_id)
+
+        # Set default state if not provided
+        # Map OPEN to "unstarted" state (typically "To-Do" in Linear)
+        if task.state == TicketState.OPEN and self._workflow_states:
+            state_mapping = self._get_state_mapping()
+            if TicketState.OPEN in state_mapping:
+                issue_input["stateId"] = state_mapping[TicketState.OPEN]
+
+        # Resolve assignee to user ID if provided
+        # Use configured default user if no assignee specified
+        assignee = task.assignee
+        if not assignee and self.user_email:
+            assignee = self.user_email
+            logger.debug(f"Using default assignee from config: {assignee}")
+
+        if assignee:
+            user_id = await self._get_user_id(assignee)
+            if user_id:
+                issue_input["assigneeId"] = user_id
+
+        # Resolve label names to IDs if provided
+        if task.tags:
+            label_ids = await self._resolve_label_ids(task.tags)
+            if label_ids:
+                issue_input["labelIds"] = label_ids
+            else:
+                # Remove labelIds if no labels resolved
+                issue_input.pop("labelIds", None)
+
+        # Resolve project ID if parent_epic is provided (supports slug, name, short ID, or URL)
+        if task.parent_epic:
+            project_id = await self._resolve_project_id(task.parent_epic)
+            if project_id:
+                # Validate team-project association before assigning
+                is_valid, _ = await self._validate_project_team_association(
+                    project_id, team_id
+                )
+
+                if not is_valid:
+                    # Attempt to add team to project automatically
+                    logging.getLogger(__name__).info(
+                        f"Team {team_id} not associated with project {project_id}. "
+                        f"Attempting to add team to project..."
+                    )
+                    success = await self._ensure_team_in_project(project_id, team_id)
+
+                    if success:
+                        issue_input["projectId"] = project_id
+                        logging.getLogger(__name__).info(
+                            "Successfully associated team with project. "
+                            "Issue will be assigned to project."
+                        )
+                    else:
+                        logging.getLogger(__name__).warning(
+                            "Could not associate team with project. "
+                            "Issue will be created without project assignment. "
+                            "Manual assignment required."
+                        )
+                        issue_input.pop("projectId", None)
+                else:
+                    # Team already associated - safe to assign
+                    issue_input["projectId"] = project_id
+            else:
+                # Log warning but don't fail - user may have provided invalid project
+                logging.getLogger(__name__).warning(
+                    f"Could not resolve project identifier '{task.parent_epic}' to UUID. "
+                    "Issue will be created without project assignment."
+                )
+                # Remove projectId if we couldn't resolve it
+                issue_input.pop("projectId", None)
+
+        # Resolve parent issue ID if provided (creates a sub-issue when parent is set)
+        # Supports identifiers like "ENG-842" or UUIDs
+        if task.parent_issue:
+            issue_id = await self._resolve_issue_id(task.parent_issue)
+            if issue_id:
+                issue_input["parentId"] = issue_id
+            else:
+                # Log warning but don't fail - user may have provided invalid issue
+                logging.getLogger(__name__).warning(
+                    f"Could not resolve issue identifier '{task.parent_issue}' to UUID. "
+                    "Sub-issue will be created without parent assignment."
+                )
+                # Remove parentId if we couldn't resolve it
+                issue_input.pop("parentId", None)
+
+        # Validate labelIds are proper UUIDs before sending to Linear API
+        # Bug Fix (v1.1.1): This validation prevents "Argument Validation Error"
+        # by ensuring labelIds contains UUIDs (e.g., "uuid-1"), not names (e.g., "bug").
+        # Linear's GraphQL API requires labelIds to be [String!]! (non-null array of
+        # non-null UUID strings). If tag names leak through, we detect and remove them
+        # here to prevent API errors.
+        #
+        # See: docs/TROUBLESHOOTING.md#issue-argument-validation-error-when-creating-issues-with-labels
+        if "labelIds" in issue_input:
+            invalid_labels = []
+            for label_id in issue_input["labelIds"]:
+                # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                if not isinstance(label_id, str) or len(label_id) != 36:
+                    invalid_labels.append(label_id)
+
+            if invalid_labels:
+                logging.getLogger(__name__).error(
+                    f"Invalid label ID format detected: {invalid_labels}. "
+                    f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                )
+                issue_input.pop("labelIds")
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logger.debug(
+            "Creating Linear issue with input: %s",
+            {
+                "title": task.title,
+                "teamId": team_id,
+                "projectId": issue_input.get("projectId"),
+                "parentId": issue_input.get("parentId"),
+                "stateId": issue_input.get("stateId"),
+                "priority": issue_input.get("priority"),
+                "labelIds": issue_input.get("labelIds"),
+                "assigneeId": issue_input.get("assigneeId"),
+                "hasDescription": bool(task.description),
+            },
+        )
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_ISSUE_MUTATION, {"input": issue_input}
+            )
+
+            if not result["issueCreate"]["success"]:
+                item_type = "sub-issue" if task.parent_issue else "issue"
+                raise ValueError(f"Failed to create Linear {item_type}")
+
+            created_issue = result["issueCreate"]["issue"]
+            return map_linear_issue_to_task(created_issue)
+
+        except Exception as e:
+            item_type = "sub-issue" if task.parent_issue else "issue"
+            raise ValueError(f"Failed to create Linear {item_type}: {e}") from e
+
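
The 36-character test above is a length check only; a stricter shape check could look like the hypothetical helper below (not part of the package), which also rejects 36-character strings that are not 8-4-4-4-12 hex:

    import re

    UUID_RE = re.compile(
        r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE
    )

    def looks_like_uuid(value: object) -> bool:
        return isinstance(value, str) and bool(UUID_RE.match(value))

    looks_like_uuid("bug")                                   # False - a tag name leaked through
    looks_like_uuid("123e4567-e89b-12d3-a456-426614174000")  # True  - safe to send as a labelId
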
+    async def _create_epic(self, epic: Epic) -> Epic:
+        """Create a Linear project from an Epic.
+
+        Args:
+        ----
+            epic: Epic to create
+
+        Returns:
+        -------
+            Created epic with Linear metadata
+
+        """
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before creating teamIds array
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear project without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        project_input = {
+            "name": epic.title,
+            "teamIds": [team_id],
+        }
+
+        if epic.description:
+            # Validate description length (Linear limit: 255 chars for project description)
+            # Matches validation in update_epic() for consistency
+            from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", epic.description, truncate=False
+                )
+                project_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(
+                    f"Epic description validation failed: {e}. "
+                    f"Linear projects have a 255 character limit for descriptions. "
+                    f"Current length: {len(epic.description)} characters."
+                ) from e
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Creating Linear project with input: %s",
+            {
+                "name": epic.title,
+                "teamIds": [team_id],
+                "hasDescription": bool(project_input.get("description")),
+                "leadId": project_input.get("leadId"),
+            },
+        )
+
+        # Create project mutation
+        create_query = """
+            mutation CreateProject($input: ProjectCreateInput!) {
+                projectCreate(input: $input) {
+                    success
+                    project {
+                        id
+                        name
+                        description
+                        state
+                        createdAt
+                        updatedAt
+                        url
+                        icon
+                        color
+                        targetDate
+                        startedAt
+                        completedAt
+                        teams {
+                            nodes {
+                                id
+                                name
+                                key
+                                description
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                create_query, {"input": project_input}
+            )
+
+            if not result["projectCreate"]["success"]:
+                raise ValueError("Failed to create Linear project")
+
+            created_project = result["projectCreate"]["project"]
             return map_linear_project_to_epic(created_project)
 
         except Exception as e:
-            raise ValueError(f"Failed to create Linear project: {e}")
+            raise ValueError(f"Failed to create Linear project: {e}") from e
+
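
A worked example of the 255-character description limit enforced above, assuming an `Epic` model that accepts `title` and `description` keywords:

    try:
        await adapter._create_epic(Epic(title="Roadmap", description="x" * 300))
    except ValueError as e:
        print(e)  # names the 255 character limit and the current length (300)
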
+    async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
+        """Update a Linear project (Epic) with specified fields.
+
+        Args:
+        ----
+            epic_id: Linear project UUID or slug-shortid
+            updates: Dictionary of fields to update. Supported fields:
+                - title: Project name
+                - description: Project description
+                - state: Project state (e.g., "planned", "started", "completed", "canceled")
+                - target_date: Target completion date (ISO format YYYY-MM-DD)
+                - color: Project color
+                - icon: Project icon
+
+        Returns:
+        -------
+            Updated Epic object or None if not found
+
+        Raises:
+        ------
+            ValueError: If update fails or project not found
+
+        """
+        # Validate credentials before attempting operation
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(epic_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{epic_id}' not found")
+
+        # Validate field lengths before building update input
+        from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
+        # Build update input from updates dict
+        update_input = {}
+
+        if "title" in updates:
+            try:
+                validated_title = FieldValidator.validate_field(
+                    "linear", "epic_name", updates["title"], truncate=False
+                )
+                update_input["name"] = validated_title
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
+
+        if "description" in updates:
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", updates["description"], truncate=False
+                )
+                update_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
+        if "state" in updates:
+            update_input["state"] = updates["state"]
+        if "target_date" in updates:
+            update_input["targetDate"] = updates["target_date"]
+        if "color" in updates:
+            update_input["color"] = updates["color"]
+        if "icon" in updates:
+            update_input["icon"] = updates["icon"]
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Updating Linear project %s with input: %s",
+            epic_id,
+            {
+                "name": update_input.get("name"),
+                "hasDescription": bool(update_input.get("description")),
+                "state": update_input.get("state"),
+                "targetDate": update_input.get("targetDate"),
+                "color": update_input.get("color"),
+                "icon": update_input.get("icon"),
+            },
+        )
+
+        # ProjectUpdate mutation
+        update_query = """
+            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+                projectUpdate(id: $id, input: $input) {
+                    success
+                    project {
+                        id
+                        name
+                        description
+                        state
+                        createdAt
+                        updatedAt
+                        url
+                        icon
+                        color
+                        targetDate
+                        startedAt
+                        completedAt
+                        teams {
+                            nodes {
+                                id
+                                name
+                                key
+                                description
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                update_query, {"id": project_uuid, "input": update_input}
+            )
+
+            if not result["projectUpdate"]["success"]:
+                raise ValueError(f"Failed to update Linear project '{epic_id}'")
+
+            updated_project = result["projectUpdate"]["project"]
+            return map_linear_project_to_epic(updated_project)
+
+        except Exception as e:
+            raise ValueError(f"Failed to update Linear project: {e}") from e
+
+    async def read(self, ticket_id: str) -> Task | Epic | None:
+        """Read a Linear issue OR project by identifier with full details.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier (e.g., 'BTA-123') or project UUID
+
+        Returns:
+        -------
+            Task with full details if issue found,
+            Epic with full details if project found,
+            None if not found
+
+        Raises:
+        ------
+            ValueError: If ticket_id is a view URL (views are not supported in ticket_read)
+
+        """
+        # Validate credentials before attempting operation
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Try reading as an issue first (most common case)
+        query = (
+            ALL_FRAGMENTS
+            + """
+            query GetIssue($identifier: String!) {
+                issue(id: $identifier) {
+                    ...IssueFullFields
+                }
+            }
+            """
+        )
+
+        try:
+            result = await self.client.execute_query(query, {"identifier": ticket_id})
+
+            if result.get("issue"):
+                return map_linear_issue_to_task(result["issue"])
+
+        except Exception:
+            # Not found as issue, continue to project/view check
+            pass
+
+        # If not found as issue, try reading as project
+        try:
+            project_data = await self.get_project(ticket_id)
+            if project_data:
+                # Fetch project's issues to populate child_issues field
+                issues = await self._get_project_issues(ticket_id)
+
+                # Map to Epic
+                epic = map_linear_project_to_epic(project_data)
+
+                # Populate child_issues with issue IDs
+                epic.child_issues = [issue.id for issue in issues]
+
+                return epic
+        except Exception:
+            # Not found as project either
+            pass
+
+        # If not found as issue or project, check if it's a view URL
+        # Views are collections of issues, not individual tickets
+        logging.debug(
+            f"[VIEW DEBUG] read() checking if ticket_id is a view: {ticket_id}"
+        )
+        try:
+            view_data = await self._get_custom_view(ticket_id)
+            logging.debug(f"[VIEW DEBUG] read() _get_custom_view returned: {view_data}")
+
+            if view_data:
+                logging.debug(
+                    "[VIEW DEBUG] read() view_data is truthy, preparing to raise ValueError"
+                )
+                # View found - raise informative error
+                view_name = view_data.get("name", "Unknown")
+                issues_data = view_data.get("issues", {})
+                issue_count = len(issues_data.get("nodes", []))
+                has_more = issues_data.get("pageInfo", {}).get("hasNextPage", False)
+                count_str = f"{issue_count}+" if has_more else str(issue_count)
+
+                logging.debug(
+                    f"[VIEW DEBUG] read() raising ValueError with view_name={view_name}, count={count_str}"
+                )
+                raise ValueError(
+                    f"Linear view URLs are not supported in ticket_read.\n"
+                    f"\n"
+                    f"View: '{view_name}' ({ticket_id})\n"
+                    f"This view contains {count_str} issues.\n"
+                    f"\n"
+                    f"Use ticket_list or ticket_search to query issues instead."
+                )
+            else:
+                logging.debug("[VIEW DEBUG] read() view_data is falsy (None or empty)")
+        except ValueError:
+            # Re-raise ValueError (our informative error message)
+            logging.debug("[VIEW DEBUG] read() re-raising ValueError")
+            raise
+        except Exception as e:
+            # View query failed - not a view
+            logging.debug(
+                f"[VIEW DEBUG] read() caught exception in view check: {type(e).__name__}: {str(e)}"
+            )
+            pass
+
+        # Not found as either issue, project, or view
+        logging.debug(
+            "[VIEW DEBUG] read() returning None - not found as issue, project, or view"
+        )
+        return None
+
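
The resolution order a caller can rely on (sketch; `project_uuid` and `view_url` are placeholders for a real Linear project UUID and view URL):

    await adapter.read("BTA-123")     # 1. issue lookup      -> Task
    await adapter.read(project_uuid)  # 2. project lookup    -> Epic with child_issues populated
    await adapter.read(view_url)      # 3. Linear view URL   -> ValueError with guidance
    await adapter.read("no-such-id")  # 4. none of the above -> None
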
+    async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
+        """Update a Linear issue with comprehensive field support.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier
+            updates: Dictionary of fields to update
+
+        Returns:
+        -------
+            Updated task or None if not found
+
+        """
+        # Validate credentials before attempting operation
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Ensure adapter is initialized (loads workflow states for state transitions)
+        await self.initialize()
+
+        # First get the Linear internal ID
+        id_query = """
+            query GetIssueId($identifier: String!) {
+                issue(id: $identifier) {
+                    id
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(
+                id_query, {"identifier": ticket_id}
+            )
+
+            if not result.get("issue"):
+                return None
+
+            linear_id = result["issue"]["id"]
+
+            # Build update input using mapper
+            update_input = build_linear_issue_update_input(updates)
+
+            # Handle state transitions
+            if "state" in updates:
+                target_state = (
+                    TicketState(updates["state"])
+                    if isinstance(updates["state"], str)
+                    else updates["state"]
+                )
+                state_mapping = self._get_state_mapping()
+                if target_state in state_mapping:
+                    update_input["stateId"] = state_mapping[target_state]
+
+            # Resolve assignee to user ID if provided
+            if "assignee" in updates and updates["assignee"]:
+                user_id = await self._get_user_id(updates["assignee"])
+                if user_id:
+                    update_input["assigneeId"] = user_id
+
+            # Resolve label names to IDs if provided
+            if "tags" in updates:
+                if updates["tags"]:  # Non-empty list
+                    try:
+                        label_ids = await self._resolve_label_ids(updates["tags"])
+                        if label_ids:
+                            update_input["labelIds"] = label_ids
+                    except ValueError as e:
+                        # Label creation failed - provide clear error message (1M-396)
+                        raise ValueError(
+                            f"Failed to update labels for issue {ticket_id}. "
+                            f"Label creation error: {e}. "
+                            f"Tip: Use the 'label_list' tool to check existing labels, "
+                            f"or verify you have permissions to create new labels."
+                        ) from e
+                else:  # Empty list = remove all labels
+                    update_input["labelIds"] = []
+
+            # Resolve project ID if parent_epic is provided (supports slug, name, short ID, or URL)
+            if "parent_epic" in updates and updates["parent_epic"]:
+                project_id = await self._resolve_project_id(updates["parent_epic"])
+                if project_id:
+                    update_input["projectId"] = project_id
+                else:
+                    logging.getLogger(__name__).warning(
+                        f"Could not resolve project identifier '{updates['parent_epic']}'"
+                    )
+
+            # Validate labelIds are proper UUIDs before sending to Linear API
+            if "labelIds" in update_input and update_input["labelIds"]:
+                invalid_labels = []
+                for label_id in update_input["labelIds"]:
+                    # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                    if not isinstance(label_id, str) or len(label_id) != 36:
+                        invalid_labels.append(label_id)
+
+                if invalid_labels:
+                    logging.getLogger(__name__).error(
+                        f"Invalid label ID format detected in update: {invalid_labels}. "
+                        f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                    )
+                    update_input.pop("labelIds")
+
+            # Execute update
+            result = await self.client.execute_mutation(
+                UPDATE_ISSUE_MUTATION, {"id": linear_id, "input": update_input}
+            )
+
+            if not result["issueUpdate"]["success"]:
+                raise ValueError("Failed to update Linear issue")
+
+            updated_issue = result["issueUpdate"]["issue"]
+            return map_linear_issue_to_task(updated_issue)
+
+        except Exception as e:
+            raise ValueError(f"Failed to update Linear issue: {e}") from e
+
+    async def delete(self, ticket_id: str) -> bool:
+        """Delete a Linear issue (archive it).
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier
+
+        Returns:
+        -------
+            True if successfully deleted/archived
+
+        """
+        # Linear doesn't support true deletion, so we archive the issue
+        try:
+            result = await self.update(ticket_id, {"archived": True})
+            return result is not None
+        except Exception:
+            return False
+
+    async def list(
+        self,
+        limit: int = 20,
+        offset: int = 0,
+        filters: dict[str, Any] | None = None,
+        compact: bool = False,
+    ) -> dict[str, Any] | builtins.list[Task]:
+        """List Linear issues with optional filtering and compact output.
+
+        Args:
+        ----
+            limit: Maximum number of issues to return (default: 20, max: 100)
+            offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
+            filters: Optional filters (state, assignee, priority, etc.)
+            compact: Return compact format for token efficiency (default: False for backward compatibility)
+
+        Returns:
+        -------
+            When compact=True: Dictionary with items and pagination metadata
+            When compact=False: List of Task objects (backward compatible, default)
+
+        Design Decision: Backward Compatible Default (1M-554)
+        ------------------------------------------------------
+        Rationale: Backward compatibility prioritized to avoid breaking existing code.
+        Compact mode available via explicit compact=True for new code.
+
+        Default compact=False maintains existing return type (list[Task]).
+        Users can opt-in to compact mode for 77% token reduction.
+
+        Recommended: Use compact=True for new code to reduce token usage by ~77%.
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before filtering
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Enforce maximum limit to prevent excessive responses
+        if limit > 100:
+            limit = 100
+
+        # Build issue filter
+        issue_filter = build_issue_filter(
+            team_id=team_id,
+            state=filters.get("state") if filters else None,
+            priority=filters.get("priority") if filters else None,
+            include_archived=(
+                filters.get("includeArchived", False) if filters else False
+            ),
+        )
+
+        # Add additional filters
+        if filters:
+            if "assignee" in filters:
+                user_id = await self._get_user_id(filters["assignee"])
+                if user_id:
+                    issue_filter["assignee"] = {"id": {"eq": user_id}}
+
+            # Support parent_issue filter for listing children (critical for parent state constraints)
+            if "parent_issue" in filters:
+                parent_id = await self._resolve_issue_id(filters["parent_issue"])
+                if parent_id:
+                    issue_filter["parent"] = {"id": {"eq": parent_id}}
+
+            if "created_after" in filters:
+                issue_filter["createdAt"] = {"gte": filters["created_after"]}
+            if "updated_after" in filters:
+                issue_filter["updatedAt"] = {"gte": filters["updated_after"]}
+            if "due_before" in filters:
+                issue_filter["dueDate"] = {"lte": filters["due_before"]}
+
+        try:
+            result = await self.client.execute_query(
+                LIST_ISSUES_QUERY, {"filter": issue_filter, "first": limit}
+            )
+
+            tasks = []
+            for issue in result["issues"]["nodes"]:
+                tasks.append(map_linear_issue_to_task(issue))
+
+            # Return compact format with pagination metadata
+            if compact:
+                from .mappers import task_to_compact_format
+
+                compact_items = [task_to_compact_format(task) for task in tasks]
+                return {
+                    "status": "success",
+                    "items": compact_items,
+                    "pagination": {
+                        "total_returned": len(compact_items),
+                        "limit": limit,
+                        "offset": offset,
+                        "has_more": len(tasks)
+                        == limit,  # Heuristic: full page likely means more
+                    },
+                }
+
+            # Backward compatible: return list of Task objects
+            return tasks
+
+        except Exception as e:
+            raise ValueError(f"Failed to list Linear issues: {e}") from e
+
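
The two return shapes side by side (sketch; the compact field set follows the code above):

    page = await adapter.list(limit=20, compact=True)
    page["items"]                   # compact dicts from task_to_compact_format
    page["pagination"]["has_more"]  # heuristic: True when a full page came back
    tasks = await adapter.list(limit=20)  # default: plain list[Task], unchanged
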
+    async def search(self, query: SearchQuery) -> builtins.list[Task]:
+        """Search Linear issues using comprehensive filters.
+
+        Args:
+        ----
+            query: Search query with filters and criteria
+
+        Returns:
+        -------
+            List of tasks matching the search criteria
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before searching
+        if not team_id:
+            raise ValueError(
+                "Cannot search Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Build comprehensive issue filter
+        issue_filter = {"team": {"id": {"eq": team_id}}}
+
+        # Text search (Linear supports full-text search)
+        if query.query:
+            # Linear's search is quite sophisticated, but we'll use a simple approach
+            # In practice, you might want to use Linear's search API endpoint
+            issue_filter["title"] = {"containsIgnoreCase": query.query}
+
+        # State filter
+        # Bug fix: Handle OPEN state specially to include both unstarted AND backlog
+        # tickets, as both Linear states map to TicketState.OPEN
+        if query.state:
+            if query.state == TicketState.OPEN:
+                # Include both "unstarted" and "backlog" states for OPEN
+                issue_filter["state"] = {"type": {"in": ["unstarted", "backlog"]}}
+            else:
+                state_type = get_linear_state_type(query.state)
+                issue_filter["state"] = {"type": {"eq": state_type}}
+
+        # Priority filter
+        if query.priority:
+            linear_priority = get_linear_priority(query.priority)
+            issue_filter["priority"] = {"eq": linear_priority}
+
+        # Assignee filter
+        if query.assignee:
+            user_id = await self._get_user_id(query.assignee)
+            if user_id:
+                issue_filter["assignee"] = {"id": {"eq": user_id}}
+
+        # Project filter (Bug fix: Add support for filtering by project/epic)
+        if query.project:
+            # Resolve project ID (supports ID, name, or URL)
+            project_id = await self._resolve_project_id(query.project)
+            if project_id:
+                issue_filter["project"] = {"id": {"eq": project_id}}
+
+        # Tags filter (labels in Linear)
+        if query.tags:
+            issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}
+
+        # Exclude archived by default
+        issue_filter["archivedAt"] = {"null": True}
+
+        try:
+            result = await self.client.execute_query(
+                SEARCH_ISSUES_QUERY, {"filter": issue_filter, "first": query.limit}
+            )
+
+            tasks = []
+            for issue in result["issues"]["nodes"]:
+                tasks.append(map_linear_issue_to_task(issue))
+
+            return tasks
+
+        except Exception as e:
+            raise ValueError(f"Failed to search Linear issues: {e}") from e
+
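
An example of the OPEN-state widening described in the comments above, assuming `SearchQuery` accepts these keywords; the resulting filter includes both Linear state types:

    results = await adapter.search(SearchQuery(state=TicketState.OPEN, limit=50))
    # Filter sent to Linear (per the code above):
    # {"team": {"id": {"eq": team_id}},
    #  "state": {"type": {"in": ["unstarted", "backlog"]}},
    #  "archivedAt": {"null": True}}
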
+    async def transition_state(
+        self, ticket_id: str, target_state: TicketState
+    ) -> Task | None:
+        """Transition Linear issue to new state with workflow validation.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier
+            target_state: Target state to transition to
+
+        Returns:
+        -------
+            Updated task or None if transition failed
+
+        """
+        # Validate transition
+        if not await self.validate_transition(ticket_id, target_state):
+            return None
+
+        # Update state
+        return await self.update(ticket_id, {"state": target_state})
+
async def validate_transition(
|
|
2553
|
+
self, ticket_id: str, target_state: TicketState
|
|
2554
|
+
) -> bool:
|
|
2555
|
+
"""Validate if state transition is allowed.
|
|
2556
|
+
|
|
2557
|
+
Delegates to BaseAdapter for:
|
|
2558
|
+
- Workflow state machine validation
|
|
2559
|
+
- Parent/child state constraint validation (from 1M-93 requirement)
|
|
2560
|
+
|
|
2561
|
+
The BaseAdapter implementation (core/adapter.py lines 312-370) ensures:
|
|
2562
|
+
1. Valid workflow state transitions (OPEN → IN_PROGRESS → READY → etc.)
|
|
2563
|
+
2. Parent issues maintain completion level ≥ max child completion level
|
|
2564
|
+
|
|
2565
|
+
Args:
|
|
2566
|
+
----
|
|
2567
|
+
ticket_id: Linear issue identifier
|
|
2568
|
+
target_state: Target state to validate
|
|
2569
|
+
|
|
2570
|
+
Returns:
|
|
2571
|
+
-------
|
|
2572
|
+
True if transition is valid, False otherwise
|
|
2573
|
+
|
|
2574
|
+
"""
|
|
2575
|
+
# Call parent implementation for all validation logic
|
|
2576
|
+
return await super().validate_transition(ticket_id, target_state)
|
|
2577
|
+
|
|
2578
|
+
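A minimal caller-side sketch of the transition flow above, assuming an already-initialized adapter instance, that the import path for `TicketState` is as shown, and that the returned `Task` exposes a `state` field:

```python
from mcp_ticketer.core.models import TicketState  # assumed import path

async def move_to_in_progress(adapter, ticket_id: str) -> None:
    # transition_state() returns None when workflow validation rejects
    # the move, so callers can branch on the result.
    task = await adapter.transition_state(ticket_id, TicketState.IN_PROGRESS)
    if task is None:
        print(f"Transition rejected for {ticket_id}")
    else:
        print(f"{ticket_id} is now {task.state}")
```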
+    async def add_comment(self, comment: Comment) -> Comment:
+        """Add a comment to a Linear issue.
+
+        Args:
+        ----
+            comment: Comment to add
+
+        Returns:
+        -------
+            Created comment with ID
+
+        """
+        # First get the Linear internal ID
+        id_query = """
+            query GetIssueId($identifier: String!) {
+                issue(id: $identifier) {
+                    id
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(
+                id_query, {"identifier": comment.ticket_id}
+            )
+
+            if not result.get("issue"):
+                raise ValueError(f"Issue {comment.ticket_id} not found")
+
+            linear_id = result["issue"]["id"]
+
+            # Create comment mutation
+            create_comment_query = """
+                mutation CreateComment($input: CommentCreateInput!) {
+                    commentCreate(input: $input) {
+                        success
+                        comment {
+                            id
+                            body
+                            createdAt
+                            updatedAt
+                            user {
+                                id
+                                name
+                                email
+                                displayName
+                            }
+                        }
+                    }
+                }
+            """
+
+            comment_input = {
+                "issueId": linear_id,
+                "body": comment.content,
+            }
+
+            result = await self.client.execute_mutation(
+                create_comment_query, {"input": comment_input}
+            )
+
+            if not result["commentCreate"]["success"]:
+                raise ValueError("Failed to create comment")
+
+            created_comment = result["commentCreate"]["comment"]
+            return map_linear_comment_to_comment(created_comment, comment.ticket_id)
+
+        except Exception as e:
+            raise ValueError(f"Failed to add comment: {e}") from e
+
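A minimal sketch of posting a comment through the method above; the `Comment` fields `ticket_id` and `content` are the ones the adapter reads, though the import path and keyword-argument construction are assumptions:

```python
from mcp_ticketer.core.models import Comment  # assumed import path

async def comment_on(adapter, ticket_id: str) -> None:
    created = await adapter.add_comment(
        Comment(ticket_id=ticket_id, content="Deployed to staging.")
    )
    # The docstring above says the returned comment carries its new ID.
    print(f"Created comment {created.id}")
```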
+    async def get_comments(
+        self, ticket_id: str, limit: int = 10, offset: int = 0
+    ) -> builtins.list[Comment]:
+        """Get comments for a Linear issue.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier
+            limit: Maximum number of comments to return
+            offset: Number of comments to skip
+
+        Returns:
+        -------
+            List of comments for the issue
+
+        """
+        query = """
+            query GetIssueComments($identifier: String!, $first: Int!) {
+                issue(id: $identifier) {
+                    comments(first: $first) {
+                        nodes {
+                            id
+                            body
+                            createdAt
+                            updatedAt
+                            user {
+                                id
+                                name
+                                email
+                                displayName
+                                avatarUrl
+                            }
+                            parent {
+                                id
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(
+                query, {"identifier": ticket_id, "first": limit}
+            )
+
+            if not result.get("issue"):
+                return []
+
+            comments = []
+            for comment_data in result["issue"]["comments"]["nodes"]:
+                comments.append(map_linear_comment_to_comment(comment_data, ticket_id))
+
+            return comments
+
+        except Exception:
+            return []
+
+    async def list_labels(self) -> builtins.list[dict[str, Any]]:
+        """List all labels available in the Linear team.
+
+        Returns:
+        -------
+            List of label dictionaries with 'id', 'name', and 'color' fields
+
+        """
+        # Get team ID for label operations
+        team_id = await self._ensure_team_id()
+        # Validate team_id before loading labels
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear labels without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Check cache for labels
+        cache_key = f"linear_labels:{team_id}"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        # Load labels if not cached
+        if cached_labels is None:
+            await self._load_team_labels(team_id)
+            cached_labels = await self._labels_cache.get(cache_key)
+
+        # Return cached labels or empty list if not available
+        if not cached_labels:
+            return []
+
+        # Transform to standardized format
+        return [
+            {
+                "id": label["id"],
+                "name": label["name"],
+                "color": label.get("color", ""),
+            }
+            for label in cached_labels
+        ]
+
+    async def invalidate_label_cache(self) -> None:
+        """Manually invalidate the label cache.
+
+        Useful when labels are modified externally or after creating new labels.
+        The cache will be automatically refreshed on the next label operation.
+
+        """
+        if self._labels_cache is not None:
+            await self._labels_cache.clear()
+
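A minimal sketch of the cache-aware label flow above, assuming an initialized adapter; `list_labels()` repopulates the cache after an explicit invalidation:

```python
async def refresh_labels(adapter) -> list[dict]:
    await adapter.invalidate_label_cache()  # drop any stale entries
    labels = await adapter.list_labels()    # reloads and re-caches
    return [{"name": label["name"], "color": label["color"]} for label in labels]
```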
+    async def upload_file(self, file_path: str, mime_type: str | None = None) -> str:
+        """Upload a file to Linear's storage and return the asset URL.
+
+        This method implements Linear's three-step file upload process:
+        1. Request a pre-signed upload URL via fileUpload mutation
+        2. Upload the file to S3 using the pre-signed URL
+        3. Return the asset URL for use in attachments
+
+        Args:
+        ----
+            file_path: Path to the file to upload
+            mime_type: MIME type of the file. If None, will be auto-detected.
+
+        Returns:
+        -------
+            Asset URL that can be used with attachmentCreate mutation
+
+        Raises:
+        ------
+            ValueError: If file doesn't exist, upload fails, or httpx not available
+            FileNotFoundError: If the specified file doesn't exist
+
+        """
+        if httpx is None:
+            raise ValueError(
+                "httpx library not installed. Install with: pip install httpx"
+            )
+
+        # Validate file exists
+        file_path_obj = Path(file_path)
+        if not file_path_obj.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+        if not file_path_obj.is_file():
+            raise ValueError(f"Path is not a file: {file_path}")
+
+        # Get file info
+        file_size = file_path_obj.stat().st_size
+        filename = file_path_obj.name
+
+        # Auto-detect MIME type if not provided
+        if mime_type is None:
+            mime_type, _ = mimetypes.guess_type(file_path)
+            if mime_type is None:
+                # Default to binary if can't detect
+                mime_type = "application/octet-stream"
+
+        # Step 1: Request pre-signed upload URL
+        upload_mutation = """
+            mutation FileUpload($contentType: String!, $filename: String!, $size: Int!) {
+                fileUpload(contentType: $contentType, filename: $filename, size: $size) {
+                    success
+                    uploadFile {
+                        uploadUrl
+                        assetUrl
+                        headers {
+                            key
+                            value
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                upload_mutation,
+                {
+                    "contentType": mime_type,
+                    "filename": filename,
+                    "size": file_size,
+                },
+            )
+
+            if not result["fileUpload"]["success"]:
+                raise ValueError("Failed to get upload URL from Linear API")
+
+            upload_file_data = result["fileUpload"]["uploadFile"]
+            upload_url = upload_file_data["uploadUrl"]
+            asset_url = upload_file_data["assetUrl"]
+            headers_list = upload_file_data.get("headers", [])
+
+            # Convert headers list to dict
+            upload_headers = {h["key"]: h["value"] for h in headers_list}
+            # Add Content-Type header
+            upload_headers["Content-Type"] = mime_type
+
+            # Step 2: Upload file to S3 using pre-signed URL
+            async with httpx.AsyncClient() as http_client:
+                with open(file_path, "rb") as f:
+                    file_content = f.read()
+
+                response = await http_client.put(
+                    upload_url,
+                    content=file_content,
+                    headers=upload_headers,
+                    timeout=60.0,  # 60 second timeout for large files
+                )
+
+                if response.status_code not in (200, 201, 204):
+                    raise ValueError(
+                        f"Failed to upload file to S3. Status: {response.status_code}, "
+                        f"Response: {response.text}"
+                    )
+
+            # Step 3: Return asset URL
+            logging.getLogger(__name__).info(
+                f"Successfully uploaded file '{filename}' ({file_size} bytes) to Linear"
+            )
+            return asset_url
+
+        except Exception as e:
+            raise ValueError(f"Failed to upload file '{filename}': {e}") from e
+
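From the caller's side, the three-step process above collapses to a single call; a minimal sketch with a hypothetical file path, assuming an initialized adapter (the returned URL only becomes visible in Linear once attached, per `attach_file_to_issue` below):

```python
async def upload_screenshot(adapter) -> str:
    asset_url = await adapter.upload_file(
        "screenshots/login-bug.png",  # hypothetical path
        mime_type="image/png",        # or omit to let mimetypes guess
    )
    return asset_url
```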
+    async def attach_file_to_issue(
+        self,
+        issue_id: str,
+        file_url: str,
+        title: str,
+        subtitle: str | None = None,
+        comment_body: str | None = None,
+    ) -> dict[str, Any]:
+        """Attach a file to a Linear issue.
+
+        The file must already be uploaded using upload_file() or be a publicly
+        accessible URL.
+
+        Args:
+        ----
+            issue_id: Linear issue identifier (e.g., "ENG-842") or UUID
+            file_url: URL of the file (from upload_file() or external URL)
+            title: Title for the attachment
+            subtitle: Optional subtitle for the attachment
+            comment_body: Optional comment text to include with the attachment
+
+        Returns:
+        -------
+            Dictionary with attachment details including id, title, url, etc.
+
+        Raises:
+        ------
+            ValueError: If attachment creation fails or issue not found
+
+        """
+        # Resolve issue identifier to UUID
+        issue_uuid = await self._resolve_issue_id(issue_id)
+        if not issue_uuid:
+            raise ValueError(f"Issue '{issue_id}' not found")
+
+        # Build attachment input
+        attachment_input: dict[str, Any] = {
+            "issueId": issue_uuid,
+            "title": title,
+            "url": file_url,
+        }
+
+        if subtitle:
+            attachment_input["subtitle"] = subtitle
+
+        if comment_body:
+            attachment_input["commentBody"] = comment_body
+
+        # Create attachment mutation
+        attachment_mutation = """
+            mutation AttachmentCreate($input: AttachmentCreateInput!) {
+                attachmentCreate(input: $input) {
+                    success
+                    attachment {
+                        id
+                        title
+                        url
+                        subtitle
+                        metadata
+                        createdAt
+                        updatedAt
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                attachment_mutation, {"input": attachment_input}
+            )
+
+            if not result["attachmentCreate"]["success"]:
+                raise ValueError(f"Failed to attach file to issue '{issue_id}'")
+
+            attachment = result["attachmentCreate"]["attachment"]
+            logging.getLogger(__name__).info(
+                f"Successfully attached file '{title}' to issue '{issue_id}'"
+            )
+            return attachment
+
+        except Exception as e:
+            raise ValueError(f"Failed to attach file to issue '{issue_id}': {e}") from e
+
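A combined upload-and-attach sketch chaining the two methods above; the issue identifier comes from the docstring's example, while the file name and subtitle are hypothetical:

```python
async def attach_log(adapter) -> dict:
    url = await adapter.upload_file("build.log", mime_type="text/plain")
    return await adapter.attach_file_to_issue(
        issue_id="ENG-842",
        file_url=url,
        title="Build log",
        subtitle="CI run #1042",  # hypothetical
        comment_body="Full build output attached.",
    )
```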
+    async def attach_file_to_epic(
+        self,
+        epic_id: str,
+        file_url: str,
+        title: str,
+        subtitle: str | None = None,
+    ) -> dict[str, Any]:
+        """Attach a file to a Linear project (Epic).
+
+        The file must already be uploaded using upload_file() or be a publicly
+        accessible URL.
+
+        Args:
+        ----
+            epic_id: Linear project UUID or slug-shortid
+            file_url: URL of the file (from upload_file() or external URL)
+            title: Title for the attachment
+            subtitle: Optional subtitle for the attachment
+
+        Returns:
+        -------
+            Dictionary with attachment details including id, title, url, etc.
+
+        Raises:
+        ------
+            ValueError: If attachment creation fails or project not found
+
+        """
+        # Resolve project identifier to UUID
+        project_uuid = await self._resolve_project_id(epic_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{epic_id}' not found")
+
+        # Build attachment input (use projectId instead of issueId)
+        attachment_input: dict[str, Any] = {
+            "projectId": project_uuid,
+            "title": title,
+            "url": file_url,
+        }
+
+        if subtitle:
+            attachment_input["subtitle"] = subtitle
+
+        # Create attachment mutation (same as for issues)
+        attachment_mutation = """
+            mutation AttachmentCreate($input: AttachmentCreateInput!) {
+                attachmentCreate(input: $input) {
+                    success
+                    attachment {
+                        id
+                        title
+                        url
+                        subtitle
+                        metadata
+                        createdAt
+                        updatedAt
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_mutation(
+                attachment_mutation, {"input": attachment_input}
+            )
+
+            if not result["attachmentCreate"]["success"]:
+                raise ValueError(f"Failed to attach file to project '{epic_id}'")
+
+            attachment = result["attachmentCreate"]["attachment"]
+            logging.getLogger(__name__).info(
+                f"Successfully attached file '{title}' to project '{epic_id}'"
+            )
+            return attachment
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to attach file to project '{epic_id}': {e}"
+            ) from e
+
+    async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
+        """Get all attachments for a Linear issue or project.
+
+        This method retrieves attachment metadata from Linear's GraphQL API.
+        Note that Linear attachment URLs require authentication to access.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier (e.g., "ENG-842") or project UUID
+
+        Returns:
+        -------
+            List of Attachment objects with metadata
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid
+
+        Authentication Note:
+        -------------------
+        Linear attachment URLs require authentication headers:
+            Authorization: Bearer {api_key}
+
+        URLs are in format: https://files.linear.app/workspace/attachment-id/filename
+        Direct access without authentication will return 401 Unauthorized.
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Try as issue first (most common case)
+        issue_uuid = await self._resolve_issue_id(ticket_id)
+
+        if issue_uuid:
+            # Query issue attachments
+            query = """
+                query GetIssueAttachments($issueId: String!) {
+                    issue(id: $issueId) {
+                        id
+                        identifier
+                        attachments {
+                            nodes {
+                                id
+                                title
+                                url
+                                subtitle
+                                metadata
+                                createdAt
+                                updatedAt
+                            }
+                        }
+                    }
+                }
+            """
+
+            try:
+                result = await self.client.execute_query(query, {"issueId": issue_uuid})
+
+                if not result.get("issue"):
+                    logger.warning(f"Issue {ticket_id} not found")
+                    return []
+
+                attachments_data = (
+                    result["issue"].get("attachments", {}).get("nodes", [])
+                )
+
+                # Map to Attachment objects using identifier (not UUID)
+                return [
+                    map_linear_attachment_to_attachment(att, ticket_id)
+                    for att in attachments_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for issue {ticket_id}: {e}")
+                return []
+
+        # Try as project if not an issue
+        project_uuid = await self._resolve_project_id(ticket_id)
+
+        if project_uuid:
+            # Query project attachments (documents)
+            query = """
+                query GetProjectAttachments($projectId: String!) {
+                    project(id: $projectId) {
+                        id
+                        name
+                        documents {
+                            nodes {
+                                id
+                                title
+                                url
+                                createdAt
+                                updatedAt
+                            }
+                        }
+                    }
+                }
+            """
+
+            try:
+                result = await self.client.execute_query(
+                    query, {"projectId": project_uuid}
+                )
+
+                if not result.get("project"):
+                    logger.warning(f"Project {ticket_id} not found")
+                    return []
+
+                documents_data = result["project"].get("documents", {}).get("nodes", [])
+
+                # Map documents to Attachment objects
+                return [
+                    map_linear_attachment_to_attachment(doc, ticket_id)
+                    for doc in documents_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for project {ticket_id}: {e}")
+                return []
+
+        # Not found as either issue or project
+        logger.warning(f"Ticket {ticket_id} not found as issue or project")
+        return []
+
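Since Linear attachment URLs return 401 without a bearer token (as the docstring above spells out), downloading one could look like this minimal sketch; the URL, API key, and destination path are placeholders:

```python
import httpx

async def download_attachment(url: str, api_key: str, dest: str) -> None:
    # Linear file URLs require the Authorization header documented above.
    async with httpx.AsyncClient() as client:
        resp = await client.get(url, headers={"Authorization": f"Bearer {api_key}"})
        resp.raise_for_status()
        with open(dest, "wb") as f:
            f.write(resp.content)
```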
+    async def list_cycles(
+        self, team_id: str | None = None, limit: int = 50
+    ) -> builtins.list[dict[str, Any]]:
+        """List Linear Cycles (Sprints) for the team.
+
+        Args:
+        ----
+            team_id: Linear team UUID. If None, uses the configured team.
+            limit: Maximum number of cycles to return (default: 50)
+
+        Returns:
+        -------
+            List of cycle dictionaries with fields:
+            - id: Cycle UUID
+            - name: Cycle name
+            - number: Cycle number
+            - startsAt: Start date (ISO format)
+            - endsAt: End date (ISO format)
+            - completedAt: Completion date (ISO format, None if not completed)
+            - progress: Progress percentage (0-1)
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Use configured team if not specified
+        if team_id is None:
+            team_id = await self._ensure_team_id()
+
+        # Validate team_id before listing cycles
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear cycles without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        try:
+            # Fetch all cycles with pagination
+            all_cycles: list[dict[str, Any]] = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page and len(all_cycles) < limit:
+                # Calculate remaining items needed
+                remaining = limit - len(all_cycles)
+                page_size = min(remaining, 50)  # Linear max page size is typically 50
+
+                variables = {"teamId": team_id, "first": page_size}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(LIST_CYCLES_QUERY, variables)
+
+                cycles_data = result.get("team", {}).get("cycles", {})
+                page_cycles = cycles_data.get("nodes", [])
+                page_info = cycles_data.get("pageInfo", {})
+
+                all_cycles.extend(page_cycles)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
+
+            return all_cycles[:limit]  # Ensure we don't exceed limit
+
+        except Exception as e:
+            raise ValueError(f"Failed to list Linear cycles: {e}") from e
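A minimal sketch of consuming the cursor-paginated cycle listing above, assuming an initialized adapter for the configured team; the dict keys follow the docstring's field list:

```python
async def print_cycles(adapter) -> None:
    cycles = await adapter.list_cycles(limit=10)
    for cycle in cycles:
        # progress is documented as a 0-1 fraction.
        print(f"Cycle {cycle['number']}: {cycle['name']} "
              f"({cycle['progress']:.0%} complete)")
```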
 
-    async def
-        """
+    async def get_issue_status(self, issue_id: str) -> dict[str, Any] | None:
+        """Get rich issue status information for a Linear issue.
 
         Args:
-
+        ----
+            issue_id: Linear issue identifier (e.g., 'BTA-123') or UUID
 
         Returns:
-
+        -------
+            Dictionary with workflow state details:
+            - id: State UUID
+            - name: State name (e.g., "In Progress")
+            - type: State type (e.g., "started", "completed")
+            - color: State color (hex format)
+            - description: State description
+            - position: Position in workflow
+            Returns None if issue not found.
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
         """
-        # Validate credentials
+        # Validate credentials
         is_valid, error_message = self.validate_credentials()
         if not is_valid:
             raise ValueError(error_message)
 
-
-
-
-
-
-
+        await self.initialize()
+
+        # Resolve issue identifier to UUID if needed
+        issue_uuid = await self._resolve_issue_id(issue_id)
+        if not issue_uuid:
+            return None
+
+        try:
+            result = await self.client.execute_query(
+                GET_ISSUE_STATUS_QUERY, {"issueId": issue_uuid}
+            )
+
+            issue_data = result.get("issue")
+            if not issue_data:
+                return None
+
+            return issue_data.get("state")
+
+        except Exception as e:
+            raise ValueError(f"Failed to get issue status for '{issue_id}': {e}") from e
+
+    async def list_issue_statuses(
+        self, team_id: str | None = None
+    ) -> builtins.list[dict[str, Any]]:
+        """List all workflow states for the team.
+
+        Args:
+        ----
+            team_id: Linear team UUID. If None, uses the configured team.
+
+        Returns:
+        -------
+            List of workflow state dictionaries with fields:
+            - id: State UUID
+            - name: State name (e.g., "Backlog", "In Progress", "Done")
+            - type: State type (e.g., "backlog", "unstarted", "started", "completed", "canceled")
+            - color: State color (hex format)
+            - description: State description
+            - position: Position in workflow (lower = earlier)
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
         """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Use configured team if not specified
+        if team_id is None:
+            team_id = await self._ensure_team_id()
+
+        # Validate team_id before listing statuses
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear issue statuses without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
 
         try:
             result = await self.client.execute_query(
-
-                {"identifier": ticket_id}
+                LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
             )
 
-
-
+            states_data = result.get("team", {}).get("states", {})
+            states = states_data.get("nodes", [])
 
-
-
-            pass
+            # Sort by position to maintain workflow order
+            states.sort(key=lambda s: s.get("position", 0))
 
-
+            return states
 
-
-
+        except Exception as e:
+            raise ValueError(f"Failed to list workflow states: {e}") from e
+
+
|
|
3341
|
+
async def list_epics(
|
|
3342
|
+
self,
|
|
3343
|
+
limit: int = 20,
|
|
3344
|
+
offset: int = 0,
|
|
3345
|
+
state: str | None = None,
|
|
3346
|
+
include_completed: bool = True,
|
|
3347
|
+
compact: bool = False,
|
|
3348
|
+
**kwargs: Any,
|
|
3349
|
+
) -> dict[str, Any] | builtins.list[Epic]:
|
|
3350
|
+
"""List Linear projects (epics) with efficient pagination and compact output.
|
|
442
3351
|
|
|
443
3352
|
Args:
|
|
444
|
-
|
|
445
|
-
|
|
3353
|
+
----
|
|
3354
|
+
limit: Maximum number of projects to return (default: 20, max: 100)
|
|
3355
|
+
offset: Number of projects to skip (note: Linear uses cursor-based pagination)
|
|
3356
|
+
state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
|
|
3357
|
+
include_completed: Whether to include completed projects (default: True)
|
|
3358
|
+
compact: Return compact format for token efficiency (default: False for backward compatibility)
|
|
3359
|
+
**kwargs: Additional filter parameters (reserved for future use)
|
|
446
3360
|
|
|
447
3361
|
Returns:
|
|
448
|
-
|
|
3362
|
+
-------
|
|
3363
|
+
When compact=True: Dictionary with items and pagination metadata
|
|
3364
|
+
When compact=False: List of Epic objects (backward compatible, default)
|
|
3365
|
+
|
|
3366
|
+
Raises:
|
|
3367
|
+
------
|
|
3368
|
+
ValueError: If credentials are invalid or query fails
|
|
3369
|
+
|
|
3370
|
+
Design Decision: Backward Compatible with Opt-in Compact Mode (1M-554)
|
|
3371
|
+
----------------------------------------------------------------------
|
|
3372
|
+
Rationale: Reduced default limit from 50 to 20 to match list() behavior.
|
|
3373
|
+
Compact mode provides ~77% token reduction when explicitly enabled.
|
|
3374
|
+
|
|
3375
|
+
Recommended: Use compact=True for new code to reduce token usage.
|
|
3376
|
+
|
|
449
3377
|
"""
|
|
450
|
-
# Validate credentials
|
|
3378
|
+
# Validate credentials
|
|
451
3379
|
is_valid, error_message = self.validate_credentials()
|
|
452
3380
|
if not is_valid:
|
|
453
3381
|
raise ValueError(error_message)
|
|
454
3382
|
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
3383
|
+
await self.initialize()
|
|
3384
|
+
team_id = await self._ensure_team_id()
|
|
3385
|
+
|
|
3386
|
+
# Validate team_id before listing projects
|
|
3387
|
+
if not team_id:
|
|
3388
|
+
raise ValueError(
|
|
3389
|
+
"Cannot list Linear projects without team_id. "
|
|
3390
|
+
"Ensure LINEAR_TEAM_KEY is configured correctly."
|
|
3391
|
+
)
|
|
3392
|
+
|
|
3393
|
+
# Enforce maximum limit to prevent excessive responses
|
|
3394
|
+
if limit > 100:
|
|
3395
|
+
limit = 100
|
|
3396
|
+
|
|
3397
|
+
# Build project filter using existing helper
|
|
3398
|
+
from .types import build_project_filter
|
|
3399
|
+
|
|
3400
|
+
project_filter = build_project_filter(
|
|
3401
|
+
state=state,
|
|
3402
|
+
team_id=team_id,
|
|
3403
|
+
include_completed=include_completed,
|
|
3404
|
+
)
|
|
3405
|
+
|
|
3406
|
+
try:
|
|
3407
|
+
# Fetch projects with pagination
|
|
3408
|
+
all_projects = []
|
|
3409
|
+
has_next_page = True
|
|
3410
|
+
after_cursor = None
|
|
3411
|
+
projects_fetched = 0
|
|
3412
|
+
|
|
3413
|
+
while has_next_page and projects_fetched < limit + offset:
|
|
3414
|
+
# Calculate how many more we need
|
|
3415
|
+
remaining = (limit + offset) - projects_fetched
|
|
3416
|
+
page_size = min(remaining, 50) # Linear max page size is typically 50
|
|
3417
|
+
|
|
3418
|
+
variables = {"filter": project_filter, "first": page_size}
|
|
3419
|
+
if after_cursor:
|
|
3420
|
+
variables["after"] = after_cursor
|
|
3421
|
+
|
|
3422
|
+
result = await self.client.execute_query(LIST_PROJECTS_QUERY, variables)
|
|
3423
|
+
|
|
3424
|
+
projects_data = result.get("projects", {})
|
|
3425
|
+
page_projects = projects_data.get("nodes", [])
|
|
3426
|
+
page_info = projects_data.get("pageInfo", {})
|
|
3427
|
+
|
|
3428
|
+
all_projects.extend(page_projects)
|
|
3429
|
+
projects_fetched += len(page_projects)
|
|
3430
|
+
|
|
3431
|
+
has_next_page = page_info.get("hasNextPage", False)
|
|
3432
|
+
after_cursor = page_info.get("endCursor")
|
|
3433
|
+
|
|
3434
|
+
# Stop if no more results on this page
|
|
3435
|
+
if not page_projects:
|
|
3436
|
+
break
|
|
3437
|
+
|
|
3438
|
+
# Apply offset and limit
|
|
3439
|
+
paginated_projects = all_projects[offset : offset + limit]
|
|
3440
|
+
|
|
3441
|
+
# Map Linear projects to Epic objects using existing mapper
|
|
3442
|
+
epics = []
|
|
3443
|
+
for project in paginated_projects:
|
|
3444
|
+
epics.append(map_linear_project_to_epic(project))
|
|
3445
|
+
|
|
3446
|
+
# Return compact format with pagination metadata
|
|
3447
|
+
if compact:
|
|
3448
|
+
from .mappers import epic_to_compact_format
|
|
3449
|
+
|
|
3450
|
+
compact_items = [epic_to_compact_format(epic) for epic in epics]
|
|
3451
|
+
return {
|
|
3452
|
+
"status": "success",
|
|
3453
|
+
"items": compact_items,
|
|
3454
|
+
"pagination": {
|
|
3455
|
+
"total_returned": len(compact_items),
|
|
3456
|
+
"limit": limit,
|
|
3457
|
+
"offset": offset,
|
|
3458
|
+
"has_more": has_next_page, # Use actual Linear pagination status
|
|
3459
|
+
},
|
|
460
3460
|
}
|
|
3461
|
+
|
|
3462
|
+
# Backward compatible: return list of Epic objects
|
|
3463
|
+
return epics
|
|
3464
|
+
|
|
3465
|
+
except Exception as e:
|
|
3466
|
+
raise ValueError(f"Failed to list Linear projects: {e}") from e
|
|
3467
|
+
|
|
3468
|
+
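A minimal sketch of the opt-in compact mode above; the response keys follow the dict built in `list_epics()`:

```python
async def first_page_of_epics(adapter) -> None:
    page = await adapter.list_epics(limit=5, compact=True)
    for item in page["items"]:
        print(item)
    if page["pagination"]["has_more"]:
        print("more projects available")
```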
+    def _linear_update_to_model(self, linear_data: dict[str, Any]) -> ProjectUpdate:
+        """Convert Linear GraphQL response to ProjectUpdate model (1M-238).
+
+        Maps Linear's ProjectUpdate entity fields to the universal ProjectUpdate model,
+        handling health value transformations and optional fields.
+
+        Args:
+        ----
+            linear_data: GraphQL response data for a ProjectUpdate entity
+
+        Returns:
+        -------
+            ProjectUpdate instance with mapped fields
+
+        Linear Health Mapping:
+        ---------------------
+        Linear uses camelCase enum values: onTrack, atRisk, offTrack
+        Universal model uses snake_case: ON_TRACK, AT_RISK, OFF_TRACK
+
+        """
+        # Map Linear health values (camelCase) to universal enum (UPPER_SNAKE_CASE)
+        health_mapping = {
+            "onTrack": ProjectUpdateHealth.ON_TRACK,
+            "atRisk": ProjectUpdateHealth.AT_RISK,
+            "offTrack": ProjectUpdateHealth.OFF_TRACK,
+        }
+
+        health_value = linear_data.get("health")
+        health = health_mapping.get(health_value) if health_value else None
+
+        # Extract user info
+        user_data = linear_data.get("user", {})
+        author_id = user_data.get("id") if user_data else None
+        author_name = user_data.get("name") if user_data else None
+
+        # Extract project info
+        project_data = linear_data.get("project", {})
+        project_id = project_data.get("id", "")
+        project_name = project_data.get("name")
+
+        # Parse timestamps
+        created_at = datetime.fromisoformat(
+            linear_data["createdAt"].replace("Z", "+00:00")
+        )
+        updated_at = None
+        if linear_data.get("updatedAt"):
+            updated_at = datetime.fromisoformat(
+                linear_data["updatedAt"].replace("Z", "+00:00")
+            )
+
+        return ProjectUpdate(
+            id=linear_data["id"],
+            project_id=project_id,
+            project_name=project_name,
+            body=linear_data["body"],
+            health=health,
+            created_at=created_at,
+            updated_at=updated_at,
+            author_id=author_id,
+            author_name=author_name,
+            url=linear_data.get("url"),
+            diff_markdown=linear_data.get("diffMarkdown"),
+        )
+
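The `replace("Z", "+00:00")` step in the mapper above exists because `datetime.fromisoformat` only accepts the `Z` UTC suffix from Python 3.11 onward; on 3.10 and earlier it raises `ValueError`. A quick illustration:

```python
from datetime import datetime

ts = "2024-05-01T12:00:00Z"  # hypothetical Linear timestamp
# Portable across Python versions, matching the mapper above:
parsed = datetime.fromisoformat(ts.replace("Z", "+00:00"))
print(parsed.tzinfo)  # prints UTC
```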
+    async def create_project_update(
+        self,
+        project_id: str,
+        body: str,
+        health: ProjectUpdateHealth | None = None,
+    ) -> ProjectUpdate:
+        """Create a project status update in Linear (1M-238).
+
+        Creates a new status update for a Linear project with optional health indicator.
+        Linear will automatically generate a diff showing changes since the last update.
+
+        Args:
+        ----
+            project_id: Linear project UUID, slugId, or short ID
+            body: Markdown-formatted update content (required)
+            health: Optional health status (ON_TRACK, AT_RISK, OFF_TRACK)
+
+        Returns:
+        -------
+            Created ProjectUpdate with Linear metadata including auto-generated diff
+
+        Raises:
+        ------
+            ValueError: If credentials invalid, project not found, or creation fails
+
+        Example:
+        -------
+            >>> update = await adapter.create_project_update(
+            ...     project_id="PROJ-123",
+            ...     body="Sprint 23 completed. 15/20 stories done.",
+            ...     health=ProjectUpdateHealth.AT_RISK
+            ... )
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(project_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{project_id}' not found")
+
+        # Build mutation variables
+        variables: dict[str, Any] = {
+            "projectId": project_uuid,
+            "body": body,
+        }
+
+        # Map health enum to Linear's camelCase format
+        if health:
+            health_mapping = {
+                ProjectUpdateHealth.ON_TRACK: "onTrack",
+                ProjectUpdateHealth.AT_RISK: "atRisk",
+                ProjectUpdateHealth.OFF_TRACK: "offTrack",
             }
+            variables["health"] = health_mapping.get(health)
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_PROJECT_UPDATE_MUTATION, variables
+            )
+
+            if not result["projectUpdateCreate"]["success"]:
+                raise ValueError(f"Failed to create project update for '{project_id}'")
+
+            update_data = result["projectUpdateCreate"]["projectUpdate"]
+            logger.info(
+                f"Created project update for project '{project_id}' (UUID: {project_uuid})"
+            )
+
+            return self._linear_update_to_model(update_data)
+
+        except Exception as e:
+            raise ValueError(
+                f"Failed to create project update for '{project_id}': {e}"
+            ) from e
+
+    async def list_project_updates(
+        self,
+        project_id: str,
+        limit: int = 10,
+    ) -> list[ProjectUpdate]:
+        """List project updates for a project (1M-238).
+
+        Retrieves recent status updates for a Linear project, ordered by creation date.
+
+        Args:
+        ----
+            project_id: Linear project UUID, slugId, or short ID
+            limit: Maximum number of updates to return (default: 10, max: 250)
+
+        Returns:
+        -------
+            List of ProjectUpdate objects ordered by creation date (newest first)
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or query fails
+
+        Example:
+        -------
+            >>> updates = await adapter.list_project_updates("PROJ-123", limit=5)
+            >>> for update in updates:
+            ...     print(f"{update.created_at}: {update.health} - {update.body[:50]}")
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Resolve project identifier to UUID if needed
+        project_uuid = await self._resolve_project_id(project_id)
+        if not project_uuid:
+            raise ValueError(f"Project '{project_id}' not found")
+
+        try:
+            result = await self.client.execute_query(
+                LIST_PROJECT_UPDATES_QUERY,
+                {"projectId": project_uuid, "first": min(limit, 250)},
+            )
+
+            project_data = result.get("project")
+            if not project_data:
+                raise ValueError(f"Project '{project_id}' not found")
+
+            updates_data = project_data.get("projectUpdates", {}).get("nodes", [])
+
+            # Map Linear updates to ProjectUpdate models
+            return [self._linear_update_to_model(update) for update in updates_data]
+
+        except Exception as e:
+            logger.warning(f"Failed to list project updates for {project_id}: {e}")
+            raise ValueError(
+                f"Failed to list project updates for '{project_id}': {e}"
+            ) from e
+
+    async def get_project_update(
+        self,
+        update_id: str,
+    ) -> ProjectUpdate:
+        """Get a specific project update by ID (1M-238).
+
+        Retrieves detailed information about a single project status update.
+
+        Args:
+        ----
+            update_id: Linear ProjectUpdate UUID
+
+        Returns:
+        -------
+            ProjectUpdate object with full details
+
+        Raises:
+        ------
+            ValueError: If credentials invalid, update not found, or query fails
+
+        Example:
+        -------
+            >>> update = await adapter.get_project_update("update-uuid-here")
+            >>> print(f"Update: {update.body}")
+            >>> print(f"Health: {update.health}")
+            >>> print(f"Diff: {update.diff_markdown}")
+
         """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
 
         try:
             result = await self.client.execute_query(
-
-                {"identifier": ticket_id}
+                GET_PROJECT_UPDATE_QUERY, {"id": update_id}
             )
 
-
-
+            update_data = result.get("projectUpdate")
+            if not update_data:
+                raise ValueError(f"Project update '{update_id}' not found")
 
-
+            return self._linear_update_to_model(update_data)
 
-
-
+        except Exception as e:
+            logger.error(f"Failed to get project update {update_id}: {e}")
+            raise ValueError(f"Failed to get project update '{update_id}': {e}") from e
 
-
-        if "state" in updates:
-            target_state = TicketState(updates["state"]) if isinstance(updates["state"], str) else updates["state"]
-            state_mapping = self._get_state_mapping()
-            if target_state in state_mapping:
-                update_input["stateId"] = state_mapping[target_state]
+    # Milestone Operations (1M-607 Phase 2: Linear Adapter Integration)
 
-
-
-
-
-
+    async def milestone_create(
+        self,
+        name: str,
+        target_date: datetime | None = None,
+        labels: list[str] | None = None,
+        description: str = "",
+        project_id: str | None = None,
+    ) -> Milestone:
+        """Create milestone using Linear Cycles.
+
+        Linear Cycles require start and end dates. If target_date is provided,
+        set startsAt to today and endsAt to target_date. If no target_date,
+        defaults to a 2-week cycle.
+
+        Args:
+        ----
+            name: Milestone name
+            target_date: Target completion date (optional)
+            labels: Labels for milestone grouping (optional, stored in metadata)
+            description: Milestone description
+            project_id: Associated project ID (optional)
+
+        Returns:
+        -------
+            Created Milestone object
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or creation fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        # Linear requires both start and end dates for cycles
+        from datetime import timedelta, timezone
+
+        starts_at = datetime.now(timezone.utc)
+        if target_date:
+            ends_at = target_date
+            # Ensure ends_at has timezone info
+            if ends_at.tzinfo is None:
+                ends_at = ends_at.replace(tzinfo=timezone.utc)
+        else:
+            # Default to 2 weeks from now
+            ends_at = starts_at + timedelta(days=14)
 
-
-        result = await self.client.
-
-        {
+        try:
+            result = await self.client.execute_query(
+                CREATE_CYCLE_MUTATION,
+                {
+                    "input": {
+                        "name": name,
+                        "description": description,
+                        "startsAt": starts_at.isoformat(),
+                        "endsAt": ends_at.isoformat(),
+                        "teamId": team_id,
+                    }
+                },
             )
 
-        if not result
-            raise ValueError("Failed to
+            if not result.get("cycleCreate", {}).get("success"):
+                raise ValueError("Failed to create cycle")
 
-
-
+            cycle_data = result["cycleCreate"]["cycle"]
+            logger.info(
+                f"Created Linear cycle {cycle_data['id']} for milestone '{name}'"
+            )
+
+            # Convert Linear Cycle to Milestone model
+            return self._cycle_to_milestone(cycle_data, labels)
 
         except Exception as e:
-
+            logger.error(f"Failed to create milestone '{name}': {e}")
+            raise ValueError(f"Failed to create milestone: {e}") from e
 
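A minimal sketch of creating a two-week milestone through the cycle-backed implementation above, assuming an initialized adapter; the name, description, and label are hypothetical:

```python
from datetime import datetime, timedelta, timezone

async def plan_sprint(adapter):
    milestone = await adapter.milestone_create(
        name="Sprint 24",
        target_date=datetime.now(timezone.utc) + timedelta(days=14),
        description="Stabilization sprint",
        labels=["backend"],  # stored in milestone metadata per the docstring
    )
    return milestone
```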
-    async def
-        """
+    async def milestone_get(self, milestone_id: str) -> Milestone | None:
+        """Get milestone by ID with progress calculation.
 
         Args:
-
+        ----
+            milestone_id: Milestone/Cycle identifier
 
         Returns:
-
+        -------
+            Milestone object with calculated progress, None if not found
+
         """
-
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
         try:
-            result = await self.
-
-
-            return False
+            result = await self.client.execute_query(
+                GET_CYCLE_QUERY, {"id": milestone_id}
+            )
 
-
+            cycle_data = result.get("cycle")
+            if not cycle_data:
+                logger.debug(f"Cycle {milestone_id} not found")
+                return None
+
+            return self._cycle_to_milestone(cycle_data)
+
+        except Exception as e:
+            logger.warning(f"Failed to get milestone {milestone_id}: {e}")
+            return None
+
+    async def milestone_list(
         self,
-
-
-
-
-        """List Linear issues with optional filtering.
+        project_id: str | None = None,
+        state: str | None = None,
+    ) -> list[Milestone]:
+        """List milestones using Linear Cycles.
 
         Args:
-
-
-
+        ----
+            project_id: Filter by project (not used by Linear Cycles)
+            state: Filter by state (open, active, completed, closed)
 
         Returns:
-
+        -------
+            List of Milestone objects
+
         """
+        logger = logging.getLogger(__name__)
+
         # Validate credentials
         is_valid, error_message = self.validate_credentials()
         if not is_valid:
@@ -543,266 +3877,280 @@ class LinearAdapter(BaseAdapter[Task]):
|
|
|
543
3877
|
await self.initialize()
|
|
544
3878
|
team_id = await self._ensure_team_id()
|
|
545
3879
|
|
|
546
|
-
# Build issue filter
|
|
547
|
-
issue_filter = build_issue_filter(
|
|
548
|
-
team_id=team_id,
|
|
549
|
-
state=filters.get("state") if filters else None,
|
|
550
|
-
priority=filters.get("priority") if filters else None,
|
|
551
|
-
include_archived=filters.get("includeArchived", False) if filters else False,
|
|
552
|
-
)
|
|
553
|
-
|
|
554
|
-
# Add additional filters
|
|
555
|
-
if filters:
|
|
556
|
-
if "assignee" in filters:
|
|
557
|
-
user_id = await self._get_user_id(filters["assignee"])
|
|
558
|
-
if user_id:
|
|
559
|
-
issue_filter["assignee"] = {"id": {"eq": user_id}}
|
|
560
|
-
|
|
561
|
-
if "created_after" in filters:
|
|
562
|
-
issue_filter["createdAt"] = {"gte": filters["created_after"]}
|
|
563
|
-
if "updated_after" in filters:
|
|
564
|
-
issue_filter["updatedAt"] = {"gte": filters["updated_after"]}
|
|
565
|
-
if "due_before" in filters:
|
|
566
|
-
issue_filter["dueDate"] = {"lte": filters["due_before"]}
|
|
567
|
-
|
|
568
3880
|
try:
|
|
569
3881
|
result = await self.client.execute_query(
|
|
570
|
-
|
|
571
|
-
{"
|
|
3882
|
+
LIST_CYCLES_QUERY,
|
|
3883
|
+
{"teamId": team_id, "first": 50, "after": None},
|
|
572
3884
|
)
|
|
573
3885
|
|
|
574
|
-
|
|
575
|
-
for
|
|
576
|
-
tasks.append(map_linear_issue_to_task(issue))
|
|
3886
|
+
cycles = result.get("team", {}).get("cycles", {}).get("nodes", [])
|
|
3887
|
+
milestones = [self._cycle_to_milestone(cycle) for cycle in cycles]
|
|
577
3888
|
|
|
578
|
-
|
|
3889
|
+
# Apply state filter if provided
|
|
3890
|
+
if state:
|
|
3891
|
+
milestones = [m for m in milestones if m.state == state]
|
|
3892
|
+
|
|
3893
|
+
logger.debug(f"Listed {len(milestones)} milestones (state={state})")
|
|
3894
|
+
return milestones
|
|
579
3895
|
|
|
580
3896
|
except Exception as e:
|
|
581
|
-
|
|
3897
|
+
logger.error(f"Failed to list milestones: {e}")
|
|
3898
|
+
return []
|
|
582
3899
|
|
|
583
|
-
async def
|
|
584
|
-
|
|
3900
|
+
async def milestone_update(
|
|
3901
|
+
self,
|
|
3902
|
+
milestone_id: str,
|
|
3903
|
+
name: str | None = None,
|
|
3904
|
+
target_date: datetime | None = None,
|
|
3905
|
+
state: str | None = None,
|
|
3906
|
+
labels: list[str] | None = None,
|
|
3907
|
+
description: str | None = None,
|
|
3908
|
+
) -> Milestone | None:
|
|
3909
|
+
"""Update milestone properties.
|
|
585
3910
|
|
|
586
3911
|
Args:
|
|
587
|
-
|
|
3912
|
+
----
|
|
3913
|
+
milestone_id: Milestone identifier
|
|
3914
|
+
name: New name (optional)
|
|
3915
|
+
target_date: New target date (optional)
|
|
3916
|
+
state: New state (optional)
|
|
3917
|
+
labels: New labels (optional, stored in metadata)
|
|
3918
|
+
description: New description (optional)
|
|
588
3919
|
|
|
589
3920
|
Returns:
|
|
590
|
-
|
|
3921
|
+
-------
|
|
3922
|
+
Updated Milestone object, None if not found
|
|
3923
|
+
|
|
591
3924
|
"""
|
|
3925
|
+
logger = logging.getLogger(__name__)
|
|
3926
|
+
|
|
592
3927
|
# Validate credentials
|
|
593
3928
|
is_valid, error_message = self.validate_credentials()
|
|
594
3929
|
if not is_valid:
|
|
595
3930
|
raise ValueError(error_message)
|
|
596
3931
|
|
|
597
3932
|
await self.initialize()
|
|
598
|
-
team_id = await self._ensure_team_id()
|
|
599
|
-
|
|
600
|
-
# Build comprehensive issue filter
|
|
601
|
-
issue_filter = {"team": {"id": {"eq": team_id}}}
|
|
602
|
-
|
|
603
|
-
# Text search (Linear supports full-text search)
|
|
604
|
-
if query.query:
|
|
605
|
-
# Linear's search is quite sophisticated, but we'll use a simple approach
|
|
606
|
-
# In practice, you might want to use Linear's search API endpoint
|
|
607
|
-
issue_filter["title"] = {"containsIgnoreCase": query.query}
|
|
608
|
-
|
|
609
|
-
# State filter
|
|
610
|
-
if query.state:
|
|
611
|
-
state_type = get_linear_state_type(query.state)
|
|
612
|
-
issue_filter["state"] = {"type": {"eq": state_type}}
|
|
613
3933
|
|
|
614
|
-
#
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
3934
|
+
# Build update input
|
|
3935
|
+
update_input = {}
|
|
3936
|
+
if name:
|
|
3937
|
+
update_input["name"] = name
|
|
3938
|
+
if description is not None:
|
|
3939
|
+
update_input["description"] = description
|
|
3940
|
+
if target_date:
|
|
3941
|
+
from datetime import timezone
|
|
618
3942
|
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
3943
|
+
# Ensure target_date has timezone
|
|
3944
|
+
if target_date.tzinfo is None:
|
|
3945
|
+
target_date = target_date.replace(tzinfo=timezone.utc)
|
|
3946
|
+
update_input["endsAt"] = target_date.isoformat()
|
|
3947
|
+
if state == "completed":
|
|
3948
|
+
# Mark cycle as completed
|
|
3949
|
+
from datetime import datetime, timezone
|
|
624
3950
|
|
|
625
|
-
|
|
626
|
-
if query.tags:
|
|
627
|
-
issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}
|
|
3951
|
+
update_input["completedAt"] = datetime.now(timezone.utc).isoformat()
|
|
628
3952
|
|
|
629
|
-
|
|
630
|
-
|
|
3953
|
+
if not update_input:
|
|
3954
|
+
# No updates provided, just return current milestone
|
|
3955
|
+
return await self.milestone_get(milestone_id)
|
|
631
3956
|
|
|
632
3957
|
try:
|
|
633
3958
|
result = await self.client.execute_query(
|
|
634
|
-
|
|
635
|
-
{"
|
|
3959
|
+
UPDATE_CYCLE_MUTATION,
|
|
3960
|
+
{"id": milestone_id, "input": update_input},
|
|
636
3961
|
)
|
|
637
3962
|
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
3963
|
+
if not result.get("cycleUpdate", {}).get("success"):
|
|
3964
|
+
logger.warning(f"Failed to update cycle {milestone_id}")
|
|
3965
|
+
return None
|
|
641
3966
|
|
|
642
|
-
|
|
3967
|
+
cycle_data = result["cycleUpdate"]["cycle"]
|
|
3968
|
+
logger.info(f"Updated Linear cycle {milestone_id}")
|
|
3969
|
+
|
|
3970
|
+
return self._cycle_to_milestone(cycle_data, labels)
|
|
643
3971
|
|
|
644
3972
|
except Exception as e:
|
|
645
|
-
|
|
3973
|
+
logger.error(f"Failed to update milestone {milestone_id}: {e}")
|
|
3974
|
+
return None
|
|
646
3975
|
|
|
-    async def
-
-
-
+    async def milestone_delete(self, milestone_id: str) -> bool:
+        """Delete (archive) milestone.
+
+        Linear doesn't support permanent cycle deletion, so this archives the cycle.
 
         Args:
-
-
+        ----
+            milestone_id: Milestone identifier
 
         Returns:
-
+        -------
+            True if deleted successfully, False otherwise
+
         """
-
-        if not await self.validate_transition(ticket_id, target_state):
-            return None
+        logger = logging.getLogger(__name__)
 
-        #
-
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
 
-
-        self, ticket_id: str, target_state: TicketState
-    ) -> bool:
-        """Validate if state transition is allowed.
+        await self.initialize()
 
-
-
-
+        try:
+            result = await self.client.execute_query(
+                ARCHIVE_CYCLE_MUTATION, {"id": milestone_id}
+            )
 
-
-
-
-
-
-            return True
+            success = result.get("cycleArchive", {}).get("success", False)
+            if success:
+                logger.info(f"Archived Linear cycle {milestone_id}")
+            else:
+                logger.warning(f"Failed to archive cycle {milestone_id}")
 
-
-
+            return success
+
+        except Exception as e:
+            logger.error(f"Failed to delete milestone {milestone_id}: {e}")
+            return False
+
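The archive handler reads the GraphQL payload defensively, so missing keys degrade to `False` rather than raising. `ARCHIVE_CYCLE_MUTATION` itself lives in `queries.py` and is not shown in this hunk; the sketch below assumes a plausible shape consistent with how the result is consumed above, and restates the defensive `.get()` chain:

```python
# Assumed shape of ARCHIVE_CYCLE_MUTATION (defined in queries.py, not in this
# hunk); the adapter only reads cycleArchive.success from the response.
ARCHIVE_CYCLE_MUTATION = """
mutation ArchiveCycle($id: String!) {
  cycleArchive(id: $id) {
    success
  }
}
"""


def parse_archive_result(result: dict) -> bool:
    # Mirrors the adapter's chain: any missing key collapses to False.
    return result.get("cycleArchive", {}).get("success", False)


assert parse_archive_result({"cycleArchive": {"success": True}}) is True
assert parse_archive_result({}) is False
```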
+    async def milestone_get_issues(
+        self,
+        milestone_id: str,
+        state: str | None = None,
+    ) -> list[Task]:
+        """Get issues associated with milestone (cycle).
 
         Args:
-
+        ----
+            milestone_id: Milestone identifier
+            state: Filter by issue state (optional)
 
         Returns:
-
-
-
-        id_query = """
-        query GetIssueId($identifier: String!) {
-            issue(id: $identifier) {
-                id
-            }
-        }
+        -------
+            List of Task objects in the milestone
+
         """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
 
         try:
             result = await self.client.execute_query(
-
-                {"identifier": comment.ticket_id}
+                GET_CYCLE_ISSUES_QUERY, {"cycleId": milestone_id, "first": 100}
             )
 
-
-
-
-
-
-            # Create comment mutation
-            create_comment_query = """
-                mutation CreateComment($input: CommentCreateInput!) {
-                    commentCreate(input: $input) {
-                        success
-                        comment {
-                            id
-                            body
-                            createdAt
-                            updatedAt
-                            user {
-                                id
-                                name
-                                email
-                                displayName
-                            }
-                        }
-                    }
-                }
-            """
+            cycle_data = result.get("cycle")
+            if not cycle_data:
+                logger.warning(f"Cycle {milestone_id} not found")
+                return []
 
-
-                "issueId": linear_id,
-                "body": comment.body,
-            }
+            issues = cycle_data.get("issues", {}).get("nodes", [])
 
-
-
-                {"input": comment_input}
-            )
+            # Convert Linear issues to Task objects
+            tasks = [map_linear_issue_to_task(issue) for issue in issues]
 
-            if
-
+            # Filter by state if provided
+            if state:
+                state_filter = TicketState(state) if state else None
+                tasks = [t for t in tasks if t.state == state_filter]
 
-
-            return
+            logger.debug(f"Retrieved {len(tasks)} issues from milestone {milestone_id}")
+            return tasks
 
         except Exception as e:
-
+            logger.error(f"Failed to get milestone issues {milestone_id}: {e}")
+            return []
 
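A hypothetical call site for the new method, assuming an already-configured adapter instance. The `state` string must coerce to a valid `TicketState` member (the enum's values are not shown in this hunk), and `task.title` is assumed from the `Task` model:

```python
# Sketch only: listing the issues in a cycle via the new milestone API.
import asyncio


async def print_cycle_issues(adapter, cycle_id: str) -> None:
    # state=None returns every issue in the cycle; passing a TicketState
    # value filters client-side after the GraphQL fetch.
    tasks = await adapter.milestone_get_issues(cycle_id, state=None)
    for task in tasks:
        print(task.id, task.title)


# asyncio.run(print_cycle_issues(adapter, "cycle-uuid"))  # adapter setup elided
```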
-
-        self,
-
-
+    def _cycle_to_milestone(
+        self,
+        cycle_data: dict[str, Any],
+        labels: list[str] | None = None,
+    ) -> Milestone:
+        """Convert Linear Cycle to universal Milestone model.
+
+        Determines state based on dates:
+        - completed: Has completedAt timestamp
+        - closed: Past end date without completion
+        - active: Current date between start and end
+        - open: Before start date
 
         Args:
-
-
-
+        ----
+            cycle_data: Linear Cycle data from GraphQL
+            labels: Optional labels to associate with milestone
 
         Returns:
-
-
-
-            query GetIssueComments($identifier: String!, $first: Int!) {
-                issue(id: $identifier) {
-                    comments(first: $first) {
-                        nodes {
-                            id
-                            body
-                            createdAt
-                            updatedAt
-                            user {
-                                id
-                                name
-                                email
-                                displayName
-                                avatarUrl
-                            }
-                            parent {
-                                id
-                            }
-                        }
-                    }
-                }
-            }
+        -------
+            Milestone object
+
         """
+        from datetime import datetime, timezone
 
-
-
-            query,
-            {"identifier": ticket_id, "first": limit}
-        )
+        # Determine state from dates
+        now = datetime.now(timezone.utc)
 
-
-
+        # Parse dates
+        starts_at_str = cycle_data.get("startsAt")
+        ends_at_str = cycle_data.get("endsAt")
+        completed_at_str = cycle_data.get("completedAt")
 
-
-
-
+        starts_at = (
+            datetime.fromisoformat(starts_at_str.replace("Z", "+00:00"))
+            if starts_at_str
+            else None
+        )
+        ends_at = (
+            datetime.fromisoformat(ends_at_str.replace("Z", "+00:00"))
+            if ends_at_str
+            else None
+        )
+        completed_at = (
+            datetime.fromisoformat(completed_at_str.replace("Z", "+00:00"))
+            if completed_at_str
+            else None
+        )
 
-
+        # Determine state
+        if completed_at:
+            state = "completed"
+        elif ends_at and now > ends_at:
+            state = "closed"  # Past due without completion
+        elif starts_at and ends_at and starts_at <= now <= ends_at:
+            state = "active"
+        else:
+            state = "open"  # Before start date
 
-
-
+        # Parse progress (Linear uses 0.0-1.0, we use 0-100)
+        progress = cycle_data.get("progress", 0.0)
+        progress_pct = progress * 100.0
+
+        return Milestone(
+            id=cycle_data["id"],
+            name=cycle_data["name"],
+            description=cycle_data.get("description", ""),
+            target_date=ends_at,
+            state=state,
+            labels=labels or [],
+            total_issues=cycle_data.get("issueCount", 0),
+            closed_issues=cycle_data.get("completedIssueCount", 0),
+            progress_pct=progress_pct,
+            created_at=None,  # Linear doesn't provide creation timestamp for cycles
+            updated_at=None,
+            platform_data={
+                "linear": {
+                    "cycle_id": cycle_data["id"],
+                    "starts_at": starts_at_str,
+                    "ends_at": ends_at_str,
+                    "completed_at": completed_at_str,
+                    "team": cycle_data.get("team"),
+                }
+            },
+        )
 
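The state rules in `_cycle_to_milestone` are purely date-driven. (The `replace("Z", "+00:00")` step above is needed because `datetime.fromisoformat` only accepts a trailing `Z` from Python 3.11 onward.) A standalone restatement of the rules with a worked example; the function is an illustration, not part of the package:

```python
# Sketch only: the date-based cycle -> milestone state mapping, extracted
# into a pure function so the branches can be exercised directly.
from datetime import datetime, timezone


def cycle_state(
    now: datetime,
    starts_at: datetime | None,
    ends_at: datetime | None,
    completed_at: datetime | None,
) -> str:
    if completed_at:
        return "completed"
    if ends_at and now > ends_at:
        return "closed"  # past due without completion
    if starts_at and ends_at and starts_at <= now <= ends_at:
        return "active"
    return "open"  # before the start date


now = datetime(2025, 6, 15, tzinfo=timezone.utc)
start = datetime(2025, 6, 1, tzinfo=timezone.utc)
end = datetime(2025, 6, 30, tzinfo=timezone.utc)

assert cycle_state(now, start, end, None) == "active"
assert cycle_state(now, start, datetime(2025, 6, 10, tzinfo=timezone.utc), None) == "closed"
assert cycle_state(now, start, end, completed_at=now) == "completed"
```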
     async def close(self) -> None:
         """Close the adapter and clean up resources."""