mcp-ticketer 0.12.0__py3-none-any.whl → 2.2.13__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
Potentially problematic release: this version of mcp-ticketer has been flagged as potentially problematic.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/aitrackdown.py +507 -6
- mcp_ticketer/adapters/asana/adapter.py +229 -0
- mcp_ticketer/adapters/asana/mappers.py +14 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +47 -5
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/adapter.py +2730 -139
- mcp_ticketer/adapters/linear/client.py +175 -3
- mcp_ticketer/adapters/linear/mappers.py +203 -8
- mcp_ticketer/adapters/linear/queries.py +280 -3
- mcp_ticketer/adapters/linear/types.py +120 -4
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cli/adapter_diagnostics.py +3 -1
- mcp_ticketer/cli/auggie_configure.py +17 -5
- mcp_ticketer/cli/codex_configure.py +97 -61
- mcp_ticketer/cli/configure.py +1288 -105
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +13 -12
- mcp_ticketer/cli/discover.py +5 -0
- mcp_ticketer/cli/gemini_configure.py +17 -5
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +6 -0
- mcp_ticketer/cli/main.py +267 -3175
- mcp_ticketer/cli/mcp_configure.py +821 -119
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/platform_detection.py +77 -12
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/setup_command.py +795 -0
- mcp_ticketer/cli/simple_health.py +12 -10
- mcp_ticketer/cli/ticket_commands.py +705 -103
- mcp_ticketer/cli/utils.py +113 -0
- mcp_ticketer/core/__init__.py +56 -6
- mcp_ticketer/core/adapter.py +533 -2
- mcp_ticketer/core/config.py +21 -21
- mcp_ticketer/core/exceptions.py +7 -1
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +31 -19
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +480 -0
- mcp_ticketer/core/onepassword_secrets.py +1 -1
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +132 -14
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +625 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/mcp/server/__main__.py +2 -1
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/main.py +106 -25
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +58 -0
- mcp_ticketer/mcp/server/tools/__init__.py +33 -11
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +5 -5
- mcp_ticketer/mcp/server/tools/bulk_tools.py +259 -202
- mcp_ticketer/mcp/server/tools/comment_tools.py +74 -12
- mcp_ticketer/mcp/server/tools/config_tools.py +1391 -145
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +870 -460
- mcp_ticketer/mcp/server/tools/instruction_tools.py +7 -5
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +3 -7
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +209 -97
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1107 -124
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +218 -236
- mcp_ticketer/queue/queue.py +68 -0
- mcp_ticketer/queue/worker.py +1 -1
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.13.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.13.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.13.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1574
- mcp_ticketer/adapters/jira.py +0 -1258
- mcp_ticketer-0.12.0.dist-info/METADATA +0 -550
- mcp_ticketer-0.12.0.dist-info/RECORD +0 -91
- mcp_ticketer-0.12.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/licenses/LICENSE +0 -0
mcp_ticketer/adapters/linear/adapter.py

@@ -6,6 +6,7 @@ import asyncio
 import logging
 import mimetypes
 import os
+from datetime import datetime
 from pathlib import Path
 from typing import Any
 
@@ -20,22 +21,49 @@ except ImportError:
 
 import builtins
 
+from ...cache.memory import MemoryCache
 from ...core.adapter import BaseAdapter
-from ...core.models import
+from ...core.models import (
+    Attachment,
+    Comment,
+    Epic,
+    Milestone,
+    ProjectUpdate,
+    ProjectUpdateHealth,
+    SearchQuery,
+    Task,
+    TicketState,
+)
 from ...core.registry import AdapterRegistry
+from ...core.url_parser import URLParserError, normalize_project_id
 from .client import LinearGraphQLClient
 from .mappers import (
     build_linear_issue_input,
     build_linear_issue_update_input,
+    map_linear_attachment_to_attachment,
     map_linear_comment_to_comment,
     map_linear_issue_to_task,
     map_linear_project_to_epic,
 )
 from .queries import (
     ALL_FRAGMENTS,
+    ARCHIVE_CYCLE_MUTATION,
+    CREATE_CYCLE_MUTATION,
     CREATE_ISSUE_MUTATION,
+    CREATE_LABEL_MUTATION,
+    CREATE_PROJECT_UPDATE_MUTATION,
+    GET_CUSTOM_VIEW_QUERY,
+    GET_CYCLE_ISSUES_QUERY,
+    GET_CYCLE_QUERY,
+    GET_ISSUE_STATUS_QUERY,
+    GET_PROJECT_UPDATE_QUERY,
+    LIST_CYCLES_QUERY,
+    LIST_ISSUE_STATUSES_QUERY,
     LIST_ISSUES_QUERY,
+    LIST_PROJECT_UPDATES_QUERY,
+    LIST_PROJECTS_QUERY,
     SEARCH_ISSUES_QUERY,
+    UPDATE_CYCLE_MUTATION,
     UPDATE_ISSUE_MUTATION,
     WORKFLOW_STATES_QUERY,
 )
@@ -70,14 +98,17 @@ class LinearAdapter(BaseAdapter[Task]):
         """Initialize Linear adapter.
 
         Args:
+        ----
            config: Configuration with:
                - api_key: Linear API key (or LINEAR_API_KEY env var)
                - workspace: Linear workspace name (optional, for documentation)
                - team_key: Linear team key (e.g., 'BTA') OR
                - team_id: Linear team UUID (e.g., '02d15669-7351-4451-9719-807576c16049')
                - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)
+               - labels_ttl: TTL for label cache in seconds (default: 300)
 
         Raises:
+        ------
            ValueError: If required configuration is missing
 
        """
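
To make the documented keys concrete, here is a minimal configuration sketch based only on the docstring above; the values are placeholders and the constructor call is assumed rather than taken from this diff:

# Hypothetical LinearAdapter config, keys as documented above (values are placeholders).
config = {
    "api_key": "lin_api_...",                     # or set LINEAR_API_KEY in the environment
    "team_key": "BTA",                            # short team code; a team_id UUID also works
    "api_url": "https://api.linear.app/graphql",  # the documented default
    "labels_ttl": 300,                            # label-cache TTL in seconds (new in this diff)
}
# adapter = LinearAdapter(config)                 # construction assumed from the docstring
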
@@ -85,7 +116,8 @@ class LinearAdapter(BaseAdapter[Task]):
         # because parent constructor calls _get_state_mapping()
         self._team_data: dict[str, Any] | None = None
         self._workflow_states: dict[str, dict[str, Any]] | None = None
-        self.
+        self._labels_ttl = config.get("labels_ttl", 300.0)  # 5 min default
+        self._labels_cache = MemoryCache(default_ttl=self._labels_ttl)
         self._users_cache: dict[str, dict[str, Any]] | None = None
         self._initialized = False
 
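
MemoryCache is imported from ...cache.memory, but its implementation is not part of this diff. A minimal async TTL cache matching the set()/clear() calls used later in this file might look like the following sketch (interface assumed from usage; the real class likely does more):

import time
from typing import Any


class MemoryCacheSketch:
    """Minimal async TTL cache matching the set()/clear() usage in this diff."""

    def __init__(self, default_ttl: float = 300.0) -> None:
        self._default_ttl = default_ttl
        self._store: dict[str, tuple[float, Any]] = {}  # key -> (expires_at, value)

    async def set(self, key: str, value: Any, ttl: float | None = None) -> None:
        expires_at = time.monotonic() + (ttl if ttl is not None else self._default_ttl)
        self._store[key] = (expires_at, value)

    async def get(self, key: str) -> Any | None:
        entry = self._store.get(key)
        if entry is None:
            return None
        expires_at, value = entry
        if time.monotonic() >= expires_at:  # expired: drop the entry and miss
            del self._store[key]
            return None
        return value

    async def clear(self) -> None:
        self._store.clear()
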
@@ -124,6 +156,7 @@ class LinearAdapter(BaseAdapter[Task]):
         self.workspace = config.get("workspace", "")
         self.team_key = config.get("team_key")
         self.team_id = config.get("team_id")
+        self.user_email = config.get("user_email")  # Optional default assignee
         self.api_url = config.get("api_url", "https://api.linear.app/graphql")
 
         # Validate team configuration
@@ -137,6 +170,7 @@ class LinearAdapter(BaseAdapter[Task]):
         """Validate Linear API credentials.
 
         Returns:
+        -------
            Tuple of (is_valid, error_message)
 
        """
@@ -149,86 +183,432 @@ class LinearAdapter(BaseAdapter[Task]):
         return True, ""
 
     async def initialize(self) -> None:
-        """Initialize adapter by preloading team, states, and labels data concurrently.
+        """Initialize adapter by preloading team, states, and labels data concurrently.
+
+        Design Decision: Enhanced Error Handling (1M-431)
+        --------------------------------------------------
+        Improved error messages to provide actionable troubleshooting guidance.
+        Added logging to track initialization progress and identify failure points.
+        Preserves original ValueError type for backward compatibility.
+
+        Raises:
+        ------
+            ValueError: If connection fails or initialization encounters errors
+                with detailed troubleshooting information
+
+        """
         if self._initialized:
             return
 
+        import logging
+
+        logger = logging.getLogger(__name__)
+
         try:
             # Test connection first
-
-
+            logger.info(
+                f"Testing Linear API connection for team {self.team_key or self.team_id}..."
+            )
+            connection_ok = await self.client.test_connection()
+
+            if not connection_ok:
+                raise ValueError(
+                    "Failed to connect to Linear API. Troubleshooting:\n"
+                    "1. Verify API key is valid (starts with 'lin_api_')\n"
+                    "2. Check team_key matches your Linear workspace\n"
+                    "3. Ensure API key has proper permissions\n"
+                    "4. Review logs for detailed error information\n"
+                    f"  API key preview: {self.api_key[:20] if self.api_key else 'None'}...\n"
+                    f"  Team: {self.team_key or self.team_id}"
+                )
+
+            logger.info("Linear API connection successful")
 
             # Load team data and workflow states concurrently
+            logger.debug("Loading team data and workflow states...")
             team_id = await self._ensure_team_id()
 
+            # Validate team_id before initialization
+            if not team_id:
+                raise ValueError(
+                    "Cannot initialize Linear adapter without team_id. "
+                    "Ensure LINEAR_TEAM_KEY is configured correctly."
+                )
+
             # Load workflow states and labels for the team
             await self._load_workflow_states(team_id)
             await self._load_team_labels(team_id)
 
             self._initialized = True
+            logger.info("Linear adapter initialized successfully")
 
+        except ValueError:
+            # Re-raise ValueError with original message (for connection failures)
+            raise
         except Exception as e:
-
+            logger.error(
+                f"Linear adapter initialization failed: {type(e).__name__}: {e}",
+                exc_info=True,
+            )
+            raise ValueError(
+                f"Failed to initialize Linear adapter: {type(e).__name__}: {e}\n"
+                "Check your credentials and network connection."
+            ) from e
 
     async def _ensure_team_id(self) -> str:
         """Ensure we have a team ID, resolving from team_key if needed.
 
+        Validates that team_id is a UUID. If it looks like a team_key,
+        resolves it to the actual UUID.
+
         Returns:
-
+        -------
+            Valid Linear team UUID
 
         Raises:
-
+        ------
+            ValueError: If neither team_id nor team_key provided, or resolution fails
 
         """
+        logger = logging.getLogger(__name__)
+
+        # If we have a team_id, validate it's actually a UUID
         if self.team_id:
-
+            # Check if it looks like a UUID (36 chars with hyphens)
+            import re
+
+            uuid_pattern = re.compile(
+                r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
+                re.IGNORECASE,
+            )
+
+            if uuid_pattern.match(self.team_id):
+                # Already a valid UUID
+                return str(self.team_id)
+            # Looks like a team_key string - need to resolve it
+            logger.warning(
+                f"team_id '{self.team_id}' is not a UUID - treating as team_key and resolving"
+            )
+            teams = await self._get_team_by_key(self.team_id)
+            if teams and len(teams) > 0:
+                resolved_id = teams[0]["id"]
+                logger.info(
+                    f"Resolved team_key '{self.team_id}' to UUID: {resolved_id}"
+                )
+                # Cache the resolved UUID
+                self.team_id = resolved_id
+                return resolved_id
+            raise ValueError(
+                f"Cannot resolve team_id '{self.team_id}' to a valid Linear team UUID. "
+                f"Please use team_key instead for team short codes like 'ENG'."
+            )
 
+        # No team_id, must have team_key
         if not self.team_key:
-            raise ValueError(
+            raise ValueError(
+                "Either team_id (UUID) or team_key (short code) must be provided"
+            )
 
         # Query team by key
+        teams = await self._get_team_by_key(self.team_key)
+
+        if not teams or len(teams) == 0:
+            raise ValueError(f"Team with key '{self.team_key}' not found")
+
+        team = teams[0]
+        team_id = team["id"]
+
+        # Cache the resolved team_id
+        self.team_id = team_id
+        self._team_data = team
+        logger.info(f"Resolved team_key '{self.team_key}' to team_id: {team_id}")
+
+        return team_id
+
+    async def _get_team_by_key(self, team_key: str) -> list[dict[str, Any]]:
+        """Query Linear API to get team by key.
+
+        Args:
+        ----
+            team_key: Short team identifier (e.g., 'ENG', 'BTA')
+
+        Returns:
+        -------
+            List of matching teams
+
+        """
         query = """
             query GetTeamByKey($key: String!) {
                 teams(filter: { key: { eq: $key } }) {
                     nodes {
                         id
-                        name
                         key
-
+                        name
                     }
                 }
             }
         """
 
+        result = await self.client.execute_query(query, {"key": team_key})
+
+        if "teams" in result and "nodes" in result["teams"]:
+            return result["teams"]["nodes"]
+
+        return []
+
+    async def _get_custom_view(self, view_id: str) -> dict[str, Any] | None:
+        """Get a Linear custom view by ID to check if it exists.
+
+        Args:
+        ----
+            view_id: View identifier (slug-uuid format)
+
+        Returns:
+        -------
+            View dict with fields (id, name, description, issues) or None if not found
+
+        """
+        logging.debug(f"[VIEW DEBUG] _get_custom_view called with view_id: {view_id}")
+
+        if not view_id:
+            logging.debug("[VIEW DEBUG] view_id is empty, returning None")
+            return None
+
         try:
-
-
+            logging.debug(
+                f"[VIEW DEBUG] Executing GET_CUSTOM_VIEW_QUERY for view_id: {view_id}"
+            )
+            result = await self.client.execute_query(
+                GET_CUSTOM_VIEW_QUERY, {"viewId": view_id, "first": 10}
+            )
+            logging.debug(f"[VIEW DEBUG] Query result: {result}")
+
+            if result.get("customView"):
+                logging.debug(
+                    f"[VIEW DEBUG] customView found in result: {result.get('customView')}"
+                )
+                return result["customView"]
 
-
-
+            logging.debug(
+                f"[VIEW DEBUG] No customView in result. Checking pattern: has_hyphen={'-' in view_id}, length={len(view_id)}"
+            )
 
-
-
-
+            # API query failed but check if this looks like a view identifier
+            # View IDs from URLs have format: slug-uuid (e.g., "mcp-skills-issues-0d0359fabcf9")
+            # If it has hyphens and is longer than 12 chars, it's likely a view URL identifier
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                # We can't fetch the actual name, so use generic "Linear View"
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
 
-
+            logging.debug("[VIEW DEBUG] Pattern did not match, returning None")
+            return None
 
         except Exception as e:
-
+            logging.debug(
+                f"[VIEW DEBUG] Exception caught: {type(e).__name__}: {str(e)}"
+            )
+            # Linear returns error if view not found
+            # Check if this looks like a view identifier to provide helpful error
+            if "-" in view_id and len(view_id) > 12:
+                logging.debug(
+                    "[VIEW DEBUG] Exception handler: Pattern matched! Returning minimal view object"
+                )
+                # Return minimal view object to trigger helpful error message
+                return {
+                    "id": view_id,
+                    "name": "Linear View",
+                    "issues": {"nodes": [], "pageInfo": {"hasNextPage": False}},
+                }
+            logging.debug(
+                "[VIEW DEBUG] Exception handler: Pattern did not match, returning None"
+            )
+            return None
+
+    async def get_project(self, project_id: str) -> dict[str, Any] | None:
+        """Get a Linear project by ID using direct query.
+
+        This method uses Linear's direct project(id:) GraphQL query for efficient lookups.
+        Supports UUID, slugId, or short ID formats.
+
+        Args:
+        ----
+            project_id: Project UUID, slugId, or short ID
+
+        Returns:
+        -------
+            Project dict with fields (id, name, description, state, etc.) or None if not found
+
+        Examples:
+        --------
+        - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (UUID)
+        - "crm-smart-monitoring-system-f59a41a96c52" (slugId)
+        - "6cf55cfcfad4" (short ID - 12 hex chars)
+
+        """
+        if not project_id:
+            return None
+
+        # Direct query using Linear's project(id:) endpoint
+        query = """
+            query GetProject($id: String!) {
+                project(id: $id) {
+                    id
+                    name
+                    description
+                    state
+                    slugId
+                    createdAt
+                    updatedAt
+                    url
+                    icon
+                    color
+                    targetDate
+                    startedAt
+                    completedAt
+                    teams {
+                        nodes {
+                            id
+                            name
+                            key
+                            description
+                        }
+                    }
+                }
+            }
+        """
+
+        try:
+            result = await self.client.execute_query(query, {"id": project_id})
+
+            if result.get("project"):
+                return result["project"]
+
+            # No match found
+            return None
+
+        except Exception:
+            # Linear returns error if project not found - return None instead of raising
+            return None
+
+    async def get_epic(self, epic_id: str, include_issues: bool = True) -> Epic | None:
+        """Get Linear project as Epic with optional issue loading.
+
+        This is the preferred method for reading projects/epics as it provides
+        explicit control over whether to load child issues.
+
+        Args:
+        ----
+            epic_id: Project UUID, slugId, or short ID
+            include_issues: Whether to fetch and populate child_issues (default True)
+
+        Returns:
+        -------
+            Epic object with child_issues populated if include_issues=True,
+            or None if project not found
+
+        Raises:
+        ------
+            ValueError: If credentials invalid
+
+        Example:
+        -------
+            # Get project with issues
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895")
+
+            # Get project metadata only (faster)
+            epic = await adapter.get_epic("c0e6db5a-03b6-479f-8796-5070b8fb7895", include_issues=False)
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Fetch project data
+        project_data = await self.get_project(epic_id)
+        if not project_data:
+            return None
+
+        # Map to Epic
+        epic = map_linear_project_to_epic(project_data)
+
+        # Optionally fetch and populate child issues
+        if include_issues:
+            issues = await self._get_project_issues(epic_id)
+            epic.child_issues = [issue.id for issue in issues if issue.id is not None]
+
+        return epic
+
+    def _validate_linear_uuid(self, uuid_value: str, field_name: str = "UUID") -> bool:
+        """Validate Linear UUID format (36 chars, 8-4-4-4-12 pattern).
+
+        Linear UUIDs follow standard UUID v4 format:
+        - Total length: 36 characters
+        - Pattern: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        - Contains exactly 4 hyphens at positions 8, 13, 18, 23
+
+        Args:
+        ----
+            uuid_value: UUID string to validate
+            field_name: Name of field for error messages (default: "UUID")
+
+        Returns:
+        -------
+            True if valid UUID format, False otherwise
+
+        Examples:
+        --------
+        >>> _validate_linear_uuid("12345678-1234-1234-1234-123456789012", "projectId")
+        True
+        >>> _validate_linear_uuid("invalid-uuid", "projectId")
+        False
+        """
+        logger = logging.getLogger(__name__)
+
+        if not isinstance(uuid_value, str):
+            logger.warning(f"{field_name} is not a string: {type(uuid_value).__name__}")
+            return False
+
+        if len(uuid_value) != 36:
+            logger.warning(
+                f"{field_name} has invalid length {len(uuid_value)}, expected 36 characters"
+            )
+            return False
+
+        if uuid_value.count("-") != 4:
+            logger.warning(
+                f"{field_name} has invalid format: {uuid_value}. "
+                f"Expected xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx pattern"
+            )
+            return False
+
+        return True
 
     async def _resolve_project_id(self, project_identifier: str) -> str | None:
         """Resolve project identifier (slug, name, short ID, or URL) to full UUID.
 
         Args:
+        ----
             project_identifier: Project slug, name, short ID, or URL
 
         Returns:
+        -------
             Full Linear project UUID, or None if not found
 
         Raises:
+        ------
             ValueError: If project lookup fails
 
         Examples:
+        --------
         - "crm-smart-monitoring-system" (slug)
         - "CRM Smart Monitoring System" (name)
         - "f59a41a96c52" (short ID from URL)
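
The team-resolution logic above accepts either a UUID or a team short code in team_id. A standalone distillation of that classification check, using the same regex as _ensure_team_id:

import re

# Same pattern used in _ensure_team_id above: standard 8-4-4-4-12 UUID.
UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
    re.IGNORECASE,
)

def looks_like_team_uuid(value: str) -> bool:
    """True for a Linear team UUID, False for a short code like 'ENG'."""
    return bool(UUID_RE.match(value))

assert looks_like_team_uuid("02d15669-7351-4451-9719-807576c16049")
assert not looks_like_team_uuid("ENG")  # treated as team_key and resolved via the API
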
@@ -238,55 +618,120 @@ class LinearAdapter(BaseAdapter[Task]):
         if not project_identifier:
             return None
 
-        #
-
-
-
-
-
-
-
-
-
-
-
+        # Use tested URL parser to normalize the identifier
+        # This correctly extracts project IDs from URLs and handles:
+        # - Full URLs: https://linear.app/team/project/slug-id/overview
+        # - Slug-ID format: slug-id
+        # - Plain identifiers: id
+        try:
+            project_identifier = normalize_project_id(
+                project_identifier, adapter_type="linear"
+            )
+        except URLParserError as e:
+            logging.getLogger(__name__).warning(
+                f"Failed to parse project identifier: {e}"
+            )
+            # Continue with original identifier - may still work if it's a name
 
         # If it looks like a full UUID already (exactly 36 chars with exactly 4 dashes), return it
         # UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
         if len(project_identifier) == 36 and project_identifier.count("-") == 4:
             return project_identifier
 
-        #
+        # OPTIMIZATION: Try direct query first if it looks like a UUID, slugId, or short ID
+        # This is more efficient than listing all projects
+        should_try_direct_query = False
+
+        # Check if it looks like a short ID (exactly 12 hex characters)
+        if len(project_identifier) == 12 and all(
+            c in "0123456789abcdefABCDEF" for c in project_identifier
+        ):
+            should_try_direct_query = True
+
+        # Check if it looks like a slugId format (contains dashes and ends with 12 hex chars)
+        if "-" in project_identifier:
+            parts = project_identifier.rsplit("-", 1)
+            if len(parts) > 1:
+                potential_short_id = parts[1]
+                if len(potential_short_id) == 12 and all(
+                    c in "0123456789abcdefABCDEF" for c in potential_short_id
+                ):
+                    should_try_direct_query = True
+
+        # Try direct query first if identifier format suggests it might work
+        if should_try_direct_query:
+            try:
+                project = await self.get_project(project_identifier)
+                if project:
+                    return project["id"]
+            except Exception as e:
+                # Direct query failed - fall through to list-based search
+                logging.getLogger(__name__).debug(
+                    f"Direct project query failed for '{project_identifier}': {e}. "
+                    f"Falling back to listing all projects."
+                )
+
+        # FALLBACK: Query all projects with pagination support
+        # This is less efficient but handles name-based lookups and edge cases
         query = """
-            query GetProjects {
-                projects(first:
+            query GetProjects($first: Int!, $after: String) {
+                projects(first: $first, after: $after) {
                     nodes {
                         id
                         name
                         slugId
                     }
+                    pageInfo {
+                        hasNextPage
+                        endCursor
+                    }
                 }
             }
         """
 
         try:
-
-
+            # Fetch all projects across multiple pages
+            all_projects = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page:
+                variables = {"first": 100}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(query, variables)
+                projects_data = result.get("projects", {})
+                page_projects = projects_data.get("nodes", [])
+                page_info = projects_data.get("pageInfo", {})
+
+                all_projects.extend(page_projects)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
 
             # Search for match by slug, slugId, name (case-insensitive)
             project_lower = project_identifier.lower()
-            for project in
+            for project in all_projects:
                 # Check if identifier matches slug pattern (extracted from slugId)
                 slug_id = project.get("slugId", "")
                 if slug_id:
                     # slugId format: "crm-smart-monitoring-system-f59a41a96c52"
+                    # Linear short IDs are always exactly 12 hexadecimal characters
                     # Extract both the slug part and short ID
                     if "-" in slug_id:
-                        parts = slug_id.rsplit(
-
-
-
-
+                        parts = slug_id.rsplit("-", 1)
+                        potential_short_id = parts[1] if len(parts) > 1 else ""
+
+                        # Validate it's exactly 12 hex characters
+                        if len(potential_short_id) == 12 and all(
+                            c in "0123456789abcdefABCDEF" for c in potential_short_id
+                        ):
+                            slug_part = parts[0]
+                            short_id = potential_short_id
+                        else:
+                            # Fallback: treat entire slugId as slug if last part isn't valid
+                            slug_part = slug_id
+                            short_id = ""
 
                     # Match full slugId, slug part, or short ID
                     if (
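
The resolution path above decides whether to attempt a direct project(id:) query based purely on the identifier's shape. The same heuristic, distilled into a standalone helper (hypothetical name, same rules as the diff):

def classify_project_identifier(identifier: str) -> str:
    """Classify a Linear project identifier using the same shape rules as above."""
    hex_digits = set("0123456789abcdefABCDEF")
    if len(identifier) == 36 and identifier.count("-") == 4:
        return "uuid"           # full UUID: returned as-is
    if len(identifier) == 12 and all(c in hex_digits for c in identifier):
        return "short-id"       # 12 hex chars: direct-query candidate
    if "-" in identifier:
        tail = identifier.rsplit("-", 1)[1]
        if len(tail) == 12 and all(c in hex_digits for c in tail):
            return "slug-id"    # slug plus 12-hex suffix: direct-query candidate
    return "name-or-slug"       # falls back to the paginated list search

assert classify_project_identifier("f59a41a96c52") == "short-id"
assert classify_project_identifier("crm-smart-monitoring-system-f59a41a96c52") == "slug-id"
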
@@ -294,11 +739,31 @@ class LinearAdapter(BaseAdapter[Task]):
                         or slug_part.lower() == project_lower
                         or short_id.lower() == project_lower
                     ):
-
+                        project_uuid = project["id"]
+                        # Validate UUID format before returning
+                        if not self._validate_linear_uuid(
+                            project_uuid, "projectId"
+                        ):
+                            logging.getLogger(__name__).error(
+                                f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                                f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                                f"This indicates a data inconsistency in Linear API response."
+                            )
+                            return None
+                        return project_uuid
 
                 # Also check exact name match (case-insensitive)
                 if project["name"].lower() == project_lower:
-
+                    project_uuid = project["id"]
+                    # Validate UUID format before returning
+                    if not self._validate_linear_uuid(project_uuid, "projectId"):
+                        logging.getLogger(__name__).error(
+                            f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                            f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                            f"This indicates a data inconsistency in Linear API response."
+                        )
+                        return None
+                    return project_uuid
 
             # No match found
             return None
@@ -308,19 +773,157 @@ class LinearAdapter(BaseAdapter[Task]):
                 f"Failed to resolve project '{project_identifier}': {e}"
             ) from e
 
+    async def _validate_project_team_association(
+        self, project_id: str, team_id: str
+    ) -> tuple[bool, list[str]]:
+        """Check if team is associated with project.
+
+        Args:
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID
+
+        Returns:
+        -------
+            Tuple of (is_associated, list_of_project_team_ids)
+
+        """
+        project = await self.get_project(project_id)
+        if not project:
+            return False, []
+
+        # Extract team IDs from project's teams
+        project_team_ids = [
+            team["id"] for team in project.get("teams", {}).get("nodes", [])
+        ]
+
+        return team_id in project_team_ids, project_team_ids
+
+    async def _ensure_team_in_project(self, project_id: str, team_id: str) -> bool:
+        """Add team to project if not already associated.
+
+        Args:
+        ----
+            project_id: Linear project UUID
+            team_id: Linear team UUID to add
+
+        Returns:
+        -------
+            True if successful, False otherwise
+
+        """
+        # First check current association
+        is_associated, existing_team_ids = (
+            await self._validate_project_team_association(project_id, team_id)
+        )
+
+        if is_associated:
+            return True  # Already associated, nothing to do
+
+        # Add team to project by updating project's teamIds
+        update_query = """
+            mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
+                projectUpdate(id: $id, input: $input) {
+                    success
+                    project {
+                        id
+                        teams {
+                            nodes {
+                                id
+                                name
+                            }
+                        }
+                    }
+                }
+            }
+        """
+
+        # Include existing teams + new team
+        all_team_ids = existing_team_ids + [team_id]
+
+        try:
+            result = await self.client.execute_mutation(
+                update_query, {"id": project_id, "input": {"teamIds": all_team_ids}}
+            )
+            success = result.get("projectUpdate", {}).get("success", False)
+
+            if success:
+                logging.getLogger(__name__).info(
+                    f"Successfully added team {team_id} to project {project_id}"
+                )
+            else:
+                logging.getLogger(__name__).warning(
+                    f"Failed to add team {team_id} to project {project_id}"
+                )
+
+            return success
+        except Exception as e:
+            logging.getLogger(__name__).error(
+                f"Error adding team {team_id} to project {project_id}: {e}"
+            )
+            return False
+
+    async def _get_project_issues(
+        self, project_id: str, limit: int = 100
+    ) -> list[Task]:
+        """Fetch all issues belonging to a Linear project.
+
+        Uses existing build_issue_filter() and LIST_ISSUES_QUERY infrastructure
+        to fetch issues filtered by project_id.
+
+        Args:
+        ----
+            project_id: Project UUID, slugId, or short ID
+            limit: Maximum issues to return (default 100, max 250)
+
+        Returns:
+        -------
+            List of Task objects representing project's issues
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or query fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Build filter for issues belonging to this project
+        issue_filter = build_issue_filter(project_id=project_id)
+
+        variables = {
+            "filter": issue_filter,
+            "first": min(limit, 250),  # Linear API max per page
+        }
+
+        try:
+            result = await self.client.execute_query(LIST_ISSUES_QUERY, variables)
+            issues = result.get("issues", {}).get("nodes", [])
+
+            # Map Linear issues to Task objects
+            return [map_linear_issue_to_task(issue) for issue in issues]
+
+        except Exception as e:
+            # Log but don't fail - return empty list if issues can't be fetched
+            logger.warning(f"Failed to fetch project issues for {project_id}: {e}")
+            return []
+
     async def _resolve_issue_id(self, issue_identifier: str) -> str | None:
         """Resolve issue identifier (like "ENG-842") to full UUID.
 
         Args:
+        ----
             issue_identifier: Issue identifier (e.g., "ENG-842") or UUID
 
         Returns:
+        -------
             Full Linear issue UUID, or None if not found
 
         Raises:
+        ------
             ValueError: If issue lookup fails
 
         Examples:
+        --------
         - "ENG-842" (issue identifier)
         - "BTA-123" (issue identifier)
         - "a1b2c3d4-e5f6-7890-abcd-ef1234567890" (already a UUID)
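
_ensure_team_in_project composes the read-side association check with a projectUpdate mutation that preserves existing team links. A hedged usage sketch (the adapter instance and IDs are placeholders, and the helpers are the private methods added above):

# Hypothetical caller of the helpers added in this hunk; IDs are placeholders.
async def attach_team(adapter, project_id: str, team_id: str) -> None:
    is_assoc, _team_ids = await adapter._validate_project_team_association(
        project_id, team_id
    )
    if not is_assoc:
        # projectUpdate keeps existing teams and appends the new one
        ok = await adapter._ensure_team_in_project(project_id, team_id)
        if not ok:
            raise RuntimeError(f"could not add team {team_id} to {project_id}")
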
@@ -360,49 +963,132 @@ class LinearAdapter(BaseAdapter[Task]):
             ) from e
 
     async def _load_workflow_states(self, team_id: str) -> None:
-        """Load and cache workflow states for the team.
+        """Load and cache workflow states for the team with semantic name matching.
+
+        Implements two-level mapping strategy to handle Linear workflows with
+        multiple states of the same type (e.g., "Todo", "Backlog", "Ready" all
+        being "unstarted"):
+
+        1. Semantic name matching: Match state names to universal states using
+           predefined mappings (flexible, respects custom workflows)
+        2. State type fallback: Use first state of matching type for unmapped
+           universal states (backward compatible)
+
+        This fixes issue 1M-552 where transitions to READY/TESTED/WAITING states
+        failed with "Discrepancy between issue state and state type" errors.
 
         Args:
+        ----
             team_id: Linear team ID
 
         """
+        logger = logging.getLogger(__name__)
        try:
            result = await self.client.execute_query(
                WORKFLOW_STATES_QUERY, {"teamId": team_id}
            )
 
-
-            for state in result["team"]["states"]["nodes"]:
-                state_type = state["type"].lower()
-                if state_type not in workflow_states:
-                    workflow_states[state_type] = state
-                elif state["position"] < workflow_states[state_type]["position"]:
-                    workflow_states[state_type] = state
+            states = result["team"]["states"]["nodes"]
 
-
+            # Build auxiliary mappings for efficient lookup
+            state_by_name: dict[str, tuple[str, str]] = {}  # name → (state_id, type)
+            state_by_type: dict[str, str] = {}  # type → state_id (first occurrence)
 
-
-
+            # Sort states by position to ensure consistent selection
+            sorted_states = sorted(states, key=lambda s: s["position"])
 
-
-
+            for state in sorted_states:
+                state_id = state["id"]
+                state_name = state["name"].lower()
                state_type = state["type"].lower()
 
-
-
+                # Store by name for semantic matching (first occurrence wins)
+                if state_name not in state_by_name:
+                    state_by_name[state_name] = (state_id, state_type)
 
-
+                # Store by type for fallback (keep first occurrence by position)
+                if state_type not in state_by_type:
+                    state_by_type[state_type] = state_id
+
+            # Build final state map with semantic matching
+            workflow_states = {}
+
+            for universal_state in TicketState:
+                state_id = None
+                matched_strategy = None
+
+                # Strategy 1: Try semantic name matching
+                if universal_state in LinearStateMapping.SEMANTIC_NAMES:
+                    for semantic_name in LinearStateMapping.SEMANTIC_NAMES[
+                        universal_state
+                    ]:
+                        if semantic_name in state_by_name:
+                            state_id = state_by_name[semantic_name][0]
+                            matched_strategy = f"name:{semantic_name}"
+                            break
+
+                # Strategy 2: Fallback to type mapping
+                if not state_id:
+                    linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
+                    if linear_type:
+                        state_id = state_by_type.get(linear_type)
+                        if state_id:
+                            matched_strategy = f"type:{linear_type}"
+
+                if state_id:
+                    workflow_states[universal_state.value] = state_id
+                    logger.debug(
+                        f"Mapped {universal_state.value} → {state_id} "
+                        f"(strategy: {matched_strategy})"
+                    )
+
+            self._workflow_states = workflow_states
+
+            # Log warning if multiple states of same type detected
+            type_counts: dict[str, int] = {}
+            for state in states:
+                state_type = state["type"].lower()
+                type_counts[state_type] = type_counts.get(state_type, 0) + 1
+
+            multi_state_types = {
+                type_: count for type_, count in type_counts.items() if count > 1
+            }
+            if multi_state_types:
+                logger.info(
+                    f"Team {team_id} has multiple states per type: {multi_state_types}. "
+                    "Using semantic name matching for state resolution."
+                )
+
+        except Exception as e:
+            raise ValueError(f"Failed to load workflow states: {e}") from e
+
+    async def _load_team_labels(self, team_id: str) -> None:
+        """Load and cache labels for the team with retry logic and pagination.
+
+        Fetches ALL labels for the team using cursor-based pagination.
+        Handles teams with >250 labels (Linear's default page size).
+
+        Args:
+        ----
+            team_id: Linear team ID
+
+        """
         logger = logging.getLogger(__name__)
 
         query = """
-            query GetTeamLabels($teamId: String
+            query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
                 team(id: $teamId) {
-                    labels {
+                    labels(first: $first, after: $after) {
                         nodes {
                             id
                             name
                             color
                             description
                         }
+                        pageInfo {
+                            hasNextPage
+                            endCursor
+                        }
                     }
                 }
            }
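
LinearStateMapping.SEMANTIC_NAMES and LinearStateMapping.TO_LINEAR are referenced above but defined elsewhere (likely in the linear types module, which this excerpt does not show). A sketch of the shape those tables would need for the two-strategy lookup to work; the member names come from the diff, while the concrete entries are illustrative assumptions:

from enum import Enum


class TicketStateSketch(Enum):  # stand-in for the real TicketState model
    OPEN = "open"
    READY = "ready"
    IN_PROGRESS = "in_progress"
    DONE = "done"


class LinearStateMappingSketch:
    # Strategy 1: universal state -> candidate Linear state *names*, lowercased
    # to match the state_by_name keys built above (entries are illustrative).
    SEMANTIC_NAMES = {
        TicketStateSketch.READY: ["ready", "ready for dev", "todo"],
        TicketStateSketch.IN_PROGRESS: ["in progress", "started"],
    }
    # Strategy 2 fallback: universal state -> Linear state *type* (illustrative).
    TO_LINEAR = {
        TicketStateSketch.OPEN: "backlog",
        TicketStateSketch.READY: "unstarted",
        TicketStateSketch.IN_PROGRESS: "started",
        TicketStateSketch.DONE: "completed",
    }
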
@@ -411,10 +1097,40 @@ class LinearAdapter(BaseAdapter[Task]):
         max_retries = 3
         for attempt in range(max_retries):
             try:
-
-
-
-
+                # Fetch all labels with pagination
+                all_labels: list[dict] = []
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    all_labels.extend(page_labels)
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                if page_count >= max_pages and has_next_page:
+                    logger.warning(
+                        f"Reached max page limit ({max_pages}) for team {team_id}. "
+                        f"Loaded {len(all_labels)} labels, but more may exist."
+                    )
+
+                # Store in TTL-based cache
+                cache_key = f"linear_labels:{team_id}"
+                await self._labels_cache.set(cache_key, all_labels)
+                logger.info(
+                    f"Loaded {len(all_labels)} labels for team {team_id} ({page_count} page(s))"
+                )
                 return  # Success
 
             except Exception as e:
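
The same cursor-pagination shape (a page of nodes plus pageInfo.hasNextPage/endCursor, capped at max_pages) recurs in this file for projects and labels. A generic sketch of the pattern, assuming only the execute_query client interface shown above:

from typing import Any, Awaitable, Callable


async def fetch_all_nodes(
    execute_query: Callable[[str, dict], Awaitable[dict]],
    query: str,
    base_variables: dict[str, Any],
    extract: Callable[[dict], dict],  # pulls the {"nodes": ..., "pageInfo": ...} block
    max_pages: int = 10,
) -> list[dict]:
    """Cursor-paginate a Linear connection, mirroring the loops in this diff."""
    nodes: list[dict] = []
    after: str | None = None
    for _ in range(max_pages):  # safety cap, as in the diff
        variables = dict(base_variables)
        if after:
            variables["after"] = after
        connection = extract(await execute_query(query, variables))
        nodes.extend(connection.get("nodes", []))
        page_info = connection.get("pageInfo", {})
        if not page_info.get("hasNextPage", False):
            break
        after = page_info.get("endCursor")
    return nodes
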
@@ -430,72 +1146,436 @@ class LinearAdapter(BaseAdapter[Task]):
                 f"Failed to load team labels after {max_retries} attempts: {e}",
                 exc_info=True,
             )
-
+            # Store empty list in cache on failure
+            cache_key = f"linear_labels:{team_id}"
+            await self._labels_cache.set(cache_key, [])
 
-    async def
-
+    async def _find_label_by_name(
+        self, name: str, team_id: str, max_retries: int = 3
+    ) -> dict | None:
+        """Find a label by name using Linear API (server-side check) with retry logic and pagination.
+
+        Handles cache staleness by checking Linear's server-side state.
+        This method is used when cache lookup misses to prevent duplicate
+        label creation attempts.
+
+        Implements retry logic with exponential backoff to handle transient
+        network failures and distinguish between "label not found" (None) and
+        "check failed" (exception).
+
+        Uses cursor-based pagination with early exit optimization to handle
+        teams with >250 labels efficiently. Stops searching as soon as the
+        label is found.
 
         Args:
-
+        ----
+            name: Label name to search for (case-insensitive)
+            team_id: Linear team ID
+            max_retries: Maximum retry attempts for transient failures (default: 3)
 
         Returns:
-
+        -------
+            dict: Label data if found (with id, name, color, description)
+            None: Label definitively doesn't exist (checked successfully)
+
+        Raises:
+        ------
+            Exception: Unable to check label existence after retries exhausted
+                (network/API failure). Caller must handle to prevent
+                duplicate label creation.
+
+        Related:
+        -------
+            1M-443: Fix duplicate label error when setting existing labels
+            1M-443 hotfix: Add retry logic to prevent ambiguous error handling
 
         """
         logger = logging.getLogger(__name__)
 
-
-
-
-
+        query = """
+            query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
+                team(id: $teamId) {
+                    labels(first: $first, after: $after) {
+                        nodes {
+                            id
+                            name
+                            color
+                            description
+                        }
+                        pageInfo {
+                            hasNextPage
+                            endCursor
+                        }
+                    }
+                }
+            }
+        """
 
-
-
-
-
+        for attempt in range(max_retries):
+            try:
+                # Search with pagination and early exit
+                name_lower = name.lower()
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+                total_checked = 0
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    total_checked += len(page_labels)
+
+                    # Case-insensitive search in current page
+                    for label in page_labels:
+                        if label["name"].lower() == name_lower:
+                            logger.debug(
+                                f"Found label '{name}' via server-side search "
+                                f"(ID: {label['id']}, checked {total_checked} labels)"
+                            )
+                            return label
+
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                if page_count >= max_pages and has_next_page:
+                    logger.warning(
+                        f"Reached max page limit ({max_pages}) searching for label '{name}'. "
+                        f"Checked {total_checked} labels, but more exist."
+                    )
+
+                # Label definitively doesn't exist (successful check)
+                logger.debug(f"Label '{name}' not found in {total_checked} team labels")
+                return None
+
+            except Exception as e:
+                if attempt < max_retries - 1:
+                    # Transient failure, retry with exponential backoff
+                    wait_time = 2**attempt
+                    await asyncio.sleep(wait_time)
+                    logger.debug(
+                        f"Retry {attempt + 1}/{max_retries} for label '{name}' search: {e}"
+                    )
+                    continue
+                else:
+                    # All retries exhausted, propagate exception
+                    # CRITICAL: Caller must handle to prevent duplicate creation
+                    logger.error(
+                        f"Failed to check label '{name}' after {max_retries} attempts: {e}"
+                    )
+                    raise
+
+        # This should never be reached (all paths return/raise in loop)
+        return None
+
+    async def _create_label(
+        self, name: str, team_id: str, color: str = "#0366d6"
+    ) -> str:
+        """Create a new label in Linear.
+
+        Implements race condition recovery: if creation fails due to duplicate,
+        retry lookup from server (Tier 2) to get the existing label ID.
+
+        Related: 1M-398 - Label duplicate error handling
+
+        Args:
+        ----
+            name: Label name
+            team_id: Linear team ID
+            color: Label color (hex format, default: blue)
+
+        Returns:
+        -------
+            str: Label ID (either newly created or existing after recovery)
+
+        Raises:
+        ------
+            ValueError: If label creation fails and recovery lookup also fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        label_input = {
+            "name": name,
+            "teamId": team_id,
+            "color": color,
+        }
+
+        try:
+            result = await self.client.execute_mutation(
+                CREATE_LABEL_MUTATION, {"input": label_input}
             )
+
+            if not result["issueLabelCreate"]["success"]:
+                raise ValueError(f"Failed to create label '{name}'")
+
+            created_label = result["issueLabelCreate"]["issueLabel"]
+            label_id = created_label["id"]
+
+            # Invalidate cache to force refresh on next access
+            if self._labels_cache is not None:
+                await self._labels_cache.clear()
+
+            logger.info(f"Created new label '{name}' with ID: {label_id}")
+            return label_id
+
+        except Exception as e:
+            """
+            Race condition recovery: Another process may have created this label
+            between our Tier 2 lookup and creation attempt.
+
+            Graceful recovery:
+            1. Check if error is duplicate label error
+            2. Retry Tier 2 lookup (query server)
+            3. Return existing label ID if found
+            4. Raise error if recovery fails
+            """
+            error_str = str(e).lower()
+
+            # Check if this is a duplicate label error
+            if "duplicate" in error_str and "label" in error_str:
+                logger.debug(
+                    f"Duplicate label detected for '{name}', attempting recovery lookup"
+                )
+
+                # Retry Tier 2 with backoff: API eventual consistency requires delay
+                # Linear API has 100-500ms propagation delay between write and read
+                max_recovery_attempts = 5
+                backoff_delays = [0.1, 0.2, 0.5, 1.0, 1.5]  # Total: 3.3s max
+
+                for attempt in range(max_recovery_attempts):
+                    try:
+                        if attempt > 0:
+                            # Wait before retry (skip delay on first attempt)
+                            delay = backoff_delays[
+                                min(attempt - 1, len(backoff_delays) - 1)
+                            ]
+                            logger.debug(
+                                f"Label '{name}' duplicate detected. "
+                                f"Retrying retrieval (attempt {attempt + 1}/{max_recovery_attempts}) "
+                                f"after {delay}s delay for API propagation..."
+                            )
+                            await asyncio.sleep(delay)
+
+                        # Query server for existing label
+                        server_label = await self._find_label_by_name(name, team_id)
+
+                        if server_label:
+                            label_id = server_label["id"]
+
+                            # Invalidate cache to force refresh on next access
+                            if self._labels_cache is not None:
+                                await self._labels_cache.clear()
+
+                            logger.info(
+                                f"Successfully recovered existing label '{name}' (ID: {label_id}) "
+                                f"after {attempt + 1} attempt(s)"
+                            )
+                            return label_id
+
+                        # Label still not found, log and continue to next retry
+                        logger.debug(
+                            f"Label '{name}' not found in recovery attempt {attempt + 1}/{max_recovery_attempts}"
+                        )
+
+                    except Exception as lookup_error:
+                        logger.warning(
+                            f"Recovery lookup failed on attempt {attempt + 1}/{max_recovery_attempts}: {lookup_error}"
+                        )
+
+                        # If this is the last attempt, raise with context
+                        if attempt == max_recovery_attempts - 1:
+                            raise ValueError(
+                                f"Failed to recover label '{name}' after {max_recovery_attempts} attempts. "
+                                f"Last error: {lookup_error}. This may indicate:\n"
+                                f"  1. Network connectivity issues\n"
+                                f"  2. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                                f"  3. Label exists beyond first 250 labels in team\n"
+                                f"  4. Permissions issue preventing label query\n"
+                                f"Please retry the operation or check Linear workspace status."
+                            ) from lookup_error
+
+                        # Not the last attempt, continue to next retry
+                        continue
+
+                # If we get here, all recovery attempts failed (label never found, no exceptions)
+                raise ValueError(
+                    f"Label '{name}' already exists but could not retrieve ID after "
+                    f"{max_recovery_attempts} attempts. The label query succeeded but returned no results.\n"
+                    f"This may indicate:\n"
+                    f"  1. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                    f"  2. Label exists beyond first 250 labels in team\n"
+                    f"  3. Permissions issue preventing label query\n"
+                    f"  4. Team ID mismatch\n"
+                    f"Please retry the operation or check Linear workspace permissions."
+                ) from e
+
+            # Not a duplicate error - re-raise original exception
+            logger.error(f"Failed to create label '{name}': {e}")
+            raise ValueError(f"Failed to create label '{name}': {e}") from e
+
+    async def _ensure_labels_exist(self, label_names: list[str]) -> list[str]:
+        """Ensure labels exist, creating them if necessary.
+
+        This method implements a three-tier label resolution flow to prevent
+        duplicate label creation errors:
+
+        1. **Tier 1 (Cache)**: Check local cache (fast, 0 API calls)
+        2. **Tier 2 (Server)**: Query Linear API for label (handles staleness, +1 API call)
+        3. **Tier 3 (Create)**: Create new label only if truly doesn't exist
+
+        The three-tier approach solves cache staleness issues where labels exist
+        in Linear but not in local cache, preventing "label already exists" errors.
+
+        Behavior (1M-396):
|
|
1439
|
+
- Fail-fast: If any label creation fails, the exception is propagated
|
|
1440
|
+
- All-or-nothing: Partial label updates are not allowed
|
|
1441
|
+
- Clear errors: Callers receive actionable error messages
|
|
1442
|
+
|
|
1443
|
+
Performance:
|
|
1444
|
+
- Cached labels: 0 additional API calls (Tier 1 hit)
|
|
1445
|
+
- New labels: +1 API call for existence check (Tier 2) + 1 for creation (Tier 3)
|
|
1446
|
+
- Trade-off: Accepts +1 API call to prevent duplicate errors
|
|
1447
|
+
|
|
1448
|
+
Args:
|
|
1449
|
+
----
|
|
1450
|
+
label_names: List of label names (strings)
|
|
1451
|
+
|
|
1452
|
+
Returns:
|
|
1453
|
+
-------
|
|
1454
|
+
List of Linear label IDs (UUIDs)
|
|
1455
|
+
|
|
1456
|
+
Raises:
|
|
1457
|
+
------
|
|
1458
|
+
ValueError: If any label creation fails
|
|
1459
|
+
|
|
1460
|
+
Related:
|
|
1461
|
+
-------
|
|
1462
|
+
1M-443: Fix duplicate label error when setting existing labels
|
|
1463
|
+
1M-396: Fail-fast label creation behavior
|
|
1464
|
+
|
|
1465
|
+
"""
|
|
1466
|
+
logger = logging.getLogger(__name__)
|
|
1467
|
+
|
|
1468
|
+
if not label_names:
|
|
457
1469
|
return []
|
|
458
1470
|
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
1471
|
+
# Get team ID for label operations
|
|
1472
|
+
team_id = await self._ensure_team_id()
|
|
1473
|
+
|
|
1474
|
+
# Validate team_id before loading labels
|
|
1475
|
+
if not team_id:
|
|
1476
|
+
raise ValueError(
|
|
1477
|
+
"Cannot resolve Linear labels without team_id. "
|
|
1478
|
+
"Ensure LINEAR_TEAM_KEY is configured correctly."
|
|
1479
|
+
)
|
|
1480
|
+
|
|
1481
|
+
# Check cache for labels
|
|
1482
|
+
cache_key = f"linear_labels:{team_id}"
|
|
1483
|
+
cached_labels = await self._labels_cache.get(cache_key)
|
|
1484
|
+
|
|
1485
|
+
# Load labels if not cached
|
|
1486
|
+
if cached_labels is None:
|
|
1487
|
+
await self._load_team_labels(team_id)
|
|
1488
|
+
cached_labels = await self._labels_cache.get(cache_key)
|
|
1489
|
+
|
|
1490
|
+
if not cached_labels:
|
|
1491
|
+
logger.error(
|
|
1492
|
+
"Label cache is empty after load attempt. Tags will be skipped."
|
|
463
1493
|
)
|
|
464
1494
|
return []
|
|
465
1495
|
|
|
466
1496
|
# Create name -> ID mapping (case-insensitive)
|
|
467
|
-
label_map = {label["name"].lower(): label["id"] for label in
|
|
1497
|
+
label_map = {label["name"].lower(): label["id"] for label in cached_labels}
|
|
468
1498
|
|
|
469
1499
|
logger.debug(f"Available labels in team: {list(label_map.keys())}")
|
|
470
1500
|
|
|
471
|
-
#
|
|
1501
|
+
# Map or create each label
|
|
472
1502
|
label_ids = []
|
|
473
|
-
unmatched_labels = []
|
|
474
|
-
|
|
475
1503
|
for name in label_names:
|
|
476
|
-
|
|
477
|
-
|
|
1504
|
+
name_lower = name.lower()
|
|
1505
|
+
|
|
1506
|
+
# Tier 1: Check cache (fast path, 0 API calls)
|
|
1507
|
+
if name_lower in label_map:
|
|
1508
|
+
label_id = label_map[name_lower]
|
|
478
1509
|
label_ids.append(label_id)
|
|
479
|
-
logger.debug(
|
|
480
|
-
|
|
481
|
-
unmatched_labels.append(name)
|
|
482
|
-
logger.warning(
|
|
483
|
-
f"Label '{name}' not found in team. Available labels: {list(label_map.keys())}"
|
|
1510
|
+
logger.debug(
|
|
1511
|
+
f"[Tier 1] Resolved cached label '{name}' to ID: {label_id}"
|
|
484
1512
|
)
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
1513
|
+
else:
|
|
1514
|
+
# Tier 2: Check server for label (handles cache staleness)
|
|
1515
|
+
try:
|
|
1516
|
+
server_label = await self._find_label_by_name(name, team_id)
|
|
1517
|
+
except Exception as e:
|
|
1518
|
+
# Server check failed after retries (1M-443 hotfix)
|
|
1519
|
+
# CRITICAL: Do NOT proceed to creation to prevent duplicates
|
|
1520
|
+
# Re-raise to signal failure to verify label existence
|
|
1521
|
+
logger.error(
|
|
1522
|
+
f"Unable to verify label '{name}' existence. "
|
|
1523
|
+
f"Cannot safely create to avoid duplicates. Error: {e}"
|
|
1524
|
+
)
|
|
1525
|
+
raise ValueError(
|
|
1526
|
+
f"Unable to verify label '{name}' existence. "
|
|
1527
|
+
f"Cannot safely create to avoid duplicates. Error: {e}"
|
|
1528
|
+
) from e
|
|
1529
|
+
|
|
1530
|
+
if server_label:
|
|
1531
|
+
# Label exists on server but not in cache - invalidate cache
|
|
1532
|
+
label_id = server_label["id"]
|
|
1533
|
+
label_ids.append(label_id)
|
|
1534
|
+
label_map[name_lower] = label_id
|
|
1535
|
+
|
|
1536
|
+
# Invalidate cache to force refresh on next access
|
|
1537
|
+
if self._labels_cache is not None:
|
|
1538
|
+
await self._labels_cache.clear()
|
|
1539
|
+
|
|
1540
|
+
logger.info(
|
|
1541
|
+
f"[Tier 2] Found stale label '{name}' on server (ID: {label_id}), "
|
|
1542
|
+
"invalidated cache for refresh"
|
|
1543
|
+
)
|
|
1544
|
+
else:
|
|
1545
|
+
# Tier 3: Label truly doesn't exist - create it
|
|
1546
|
+
# Propagate exceptions for fail-fast behavior (1M-396)
|
|
1547
|
+
new_label_id = await self._create_label(name, team_id)
|
|
1548
|
+
label_ids.append(new_label_id)
|
|
1549
|
+
# Update local map for subsequent labels in same call
|
|
1550
|
+
label_map[name_lower] = new_label_id
|
|
1551
|
+
logger.info(
|
|
1552
|
+
f"[Tier 3] Created new label '{name}' with ID: {new_label_id}"
|
|
1553
|
+
)
|
|
491
1554
|
|
|
492
1555
|
return label_ids
|
|
493
1556
|
|
|
1557
|
+
async def _resolve_label_ids(self, label_names: list[str]) -> list[str]:
|
|
1558
|
+
"""Resolve label names to Linear label IDs, creating labels if needed.
|
|
1559
|
+
|
|
1560
|
+
This method wraps _ensure_labels_exist for backward compatibility.
|
|
1561
|
+
|
|
1562
|
+
Args:
|
|
1563
|
+
----
|
|
1564
|
+
label_names: List of label names
|
|
1565
|
+
|
|
1566
|
+
Returns:
|
|
1567
|
+
-------
|
|
1568
|
+
List of Linear label IDs
|
|
1569
|
+
|
|
1570
|
+
"""
|
|
1571
|
+
return await self._ensure_labels_exist(label_names)
|
|
1572
|
+
|
|
494
1573
|
def _get_state_mapping(self) -> dict[TicketState, str]:
|
|
495
1574
|
"""Get mapping from universal states to Linear workflow state IDs.
|
|
496
1575
|
|
|
497
1576
|
Returns:
|
|
498
|
-
|
|
1577
|
+
-------
|
|
1578
|
+
Dictionary mapping TicketState to Linear state ID (UUID)
|
|
499
1579
|
|
|
500
1580
|
"""
|
|
501
1581
|
if not self._workflow_states:
|
|
@@ -512,13 +1592,18 @@ class LinearAdapter(BaseAdapter[Task]):
|
|
|
512
1592
|
}
|
|
513
1593
|
|
|
514
1594
|
# Return ID-based mapping using cached workflow states
|
|
1595
|
+
# _workflow_states is keyed by universal_state.value (e.g., "open")
|
|
1596
|
+
# and contains state UUIDs directly
|
|
515
1597
|
mapping = {}
|
|
516
|
-
for universal_state
|
|
517
|
-
|
|
518
|
-
|
|
1598
|
+
for universal_state in TicketState:
|
|
1599
|
+
state_uuid = self._workflow_states.get(universal_state.value)
|
|
1600
|
+
if state_uuid:
|
|
1601
|
+
mapping[universal_state] = state_uuid
|
|
519
1602
|
else:
|
|
520
|
-
# Fallback to type name
|
|
521
|
-
|
|
1603
|
+
# Fallback to type name if state not found in cache
|
|
1604
|
+
linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
|
|
1605
|
+
if linear_type:
|
|
1606
|
+
mapping[universal_state] = linear_type
|
|
522
1607
|
|
|
523
1608
|
return mapping
|
|
524
1609
|
|
|
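The label machinery above packs two ideas into one place: a three-tier lookup (local cache, then an authoritative server query, then creation) and a backoff-driven recovery for the race where a concurrent writer creates the same label first. A minimal standalone sketch of the combined pattern, with `cache`, `find_label`, and `create_label` as hypothetical stand-ins for the adapter's cache and GraphQL calls:

```python
import asyncio
from collections.abc import Awaitable, Callable


async def resolve_label(
    name: str,
    cache: dict[str, str],
    find_label: Callable[[str], Awaitable[str | None]],
    create_label: Callable[[str], Awaitable[str]],
) -> str:
    key = name.lower()
    # Tier 1: local cache (0 API calls)
    if key in cache:
        return cache[key]
    # Tier 2: authoritative server lookup (handles cache staleness)
    label_id = await find_label(name)
    if label_id is None:
        # Tier 3: create, recovering if a concurrent writer won the race
        try:
            label_id = await create_label(name)
        except Exception as exc:
            if "duplicate" not in str(exc).lower():
                raise
            # Re-query with short backoff to ride out propagation delay
            for delay in (0.1, 0.2, 0.5, 1.0, 1.5):
                await asyncio.sleep(delay)
                label_id = await find_label(name)
                if label_id is not None:
                    break
            else:
                raise  # recovery failed; surface the duplicate error
    cache[key] = label_id
    return label_id
```

The `for ... else` re-raises the original duplicate error when every recovery lookup comes back empty, matching the fail-fast behavior the docstrings above describe.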
@@ -526,9 +1611,11 @@ class LinearAdapter(BaseAdapter[Task]):
         """Get Linear user ID from email, display name, or user ID.

         Args:
+        ----
             user_identifier: Email, display name, or user ID

         Returns:
+        -------
             Linear user ID or None if not found

         """
@@ -572,12 +1659,15 @@ class LinearAdapter(BaseAdapter[Task]):
         """Create a new Linear issue or project with full field support.

         Args:
+        ----
             ticket: Epic or Task to create

         Returns:
+        -------
             Created ticket with populated ID and metadata

         Raises:
+        ------
             ValueError: If credentials are invalid or creation fails

         """
@@ -606,14 +1696,24 @@ class LinearAdapter(BaseAdapter[Task]):
         - Sub-issue: Child work item (has parent issue)

         Args:
+        ----
             task: Task to create

         Returns:
+        -------
             Created task with Linear metadata

         """
+        logger = logging.getLogger(__name__)
         team_id = await self._ensure_team_id()

+        # Validate team_id before creating issue
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear issue without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         # Build issue input using mapper
         issue_input = build_linear_issue_input(task, team_id)

@@ -625,8 +1725,14 @@ class LinearAdapter(BaseAdapter[Task]):
             issue_input["stateId"] = state_mapping[TicketState.OPEN]

         # Resolve assignee to user ID if provided
-        if
-
+        # Use configured default user if no assignee specified
+        assignee = task.assignee
+        if not assignee and self.user_email:
+            assignee = self.user_email
+            logger.debug(f"Using default assignee from config: {assignee}")
+
+        if assignee:
+            user_id = await self._get_user_id(assignee)
             if user_id:
                 issue_input["assigneeId"] = user_id

@@ -643,7 +1749,35 @@ class LinearAdapter(BaseAdapter[Task]):
         if task.parent_epic:
             project_id = await self._resolve_project_id(task.parent_epic)
             if project_id:
-
+                # Validate team-project association before assigning
+                is_valid, _ = await self._validate_project_team_association(
+                    project_id, team_id
+                )
+
+                if not is_valid:
+                    # Attempt to add team to project automatically
+                    logging.getLogger(__name__).info(
+                        f"Team {team_id} not associated with project {project_id}. "
+                        f"Attempting to add team to project..."
+                    )
+                    success = await self._ensure_team_in_project(project_id, team_id)
+
+                    if success:
+                        issue_input["projectId"] = project_id
+                        logging.getLogger(__name__).info(
+                            "Successfully associated team with project. "
+                            "Issue will be assigned to project."
+                        )
+                    else:
+                        logging.getLogger(__name__).warning(
+                            "Could not associate team with project. "
+                            "Issue will be created without project assignment. "
+                            "Manual assignment required."
+                        )
+                        issue_input.pop("projectId", None)
+                else:
+                    # Team already associated - safe to assign
+                    issue_input["projectId"] = project_id
             else:
                 # Log warning but don't fail - user may have provided invalid project
                 logging.getLogger(__name__).warning(
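The parent-epic branch above chooses graceful degradation over a failed create: validate the team/project association, try to repair it, and only then drop the `projectId`. Compressed to its decision flow, assuming an adapter object exposing the two private helpers named in the diff:

```python
async def safe_project_id(adapter, project_id: str, team_id: str) -> str | None:
    """Return project_id when the issue can safely join the project, else None."""
    is_valid, _ = await adapter._validate_project_team_association(project_id, team_id)
    if is_valid:
        return project_id  # team already associated - safe to assign
    if await adapter._ensure_team_in_project(project_id, team_id):
        return project_id  # association repaired automatically
    return None  # create the issue unassigned; manual assignment required
```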
@@ -668,6 +1802,44 @@ class LinearAdapter(BaseAdapter[Task]):
             # Remove parentId if we couldn't resolve it
             issue_input.pop("parentId", None)

+        # Validate labelIds are proper UUIDs before sending to Linear API
+        # Bug Fix (v1.1.1): This validation prevents "Argument Validation Error"
+        # by ensuring labelIds contains UUIDs (e.g., "uuid-1"), not names (e.g., "bug").
+        # Linear's GraphQL API requires labelIds to be [String!]! (non-null array of
+        # non-null UUID strings). If tag names leak through, we detect and remove them
+        # here to prevent API errors.
+        #
+        # See: docs/TROUBLESHOOTING.md#issue-argument-validation-error-when-creating-issues-with-labels
+        if "labelIds" in issue_input:
+            invalid_labels = []
+            for label_id in issue_input["labelIds"]:
+                # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                if not isinstance(label_id, str) or len(label_id) != 36:
+                    invalid_labels.append(label_id)
+
+            if invalid_labels:
+                logging.getLogger(__name__).error(
+                    f"Invalid label ID format detected: {invalid_labels}. "
+                    f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                )
+                issue_input.pop("labelIds")
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logger.debug(
+            "Creating Linear issue with input: %s",
+            {
+                "title": task.title,
+                "teamId": team_id,
+                "projectId": issue_input.get("projectId"),
+                "parentId": issue_input.get("parentId"),
+                "stateId": issue_input.get("stateId"),
+                "priority": issue_input.get("priority"),
+                "labelIds": issue_input.get("labelIds"),
+                "assigneeId": issue_input.get("assigneeId"),
+                "hasDescription": bool(task.description),
+            },
+        )
+
         try:
             result = await self.client.execute_mutation(
                 CREATE_ISSUE_MUTATION, {"input": issue_input}
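The 36-character test above is a deliberately cheap heuristic for catching tag names that leaked into `labelIds`. Where strict validation is wanted, parsing with the standard library's `uuid` module is one option; this is a sketch, not what the release ships:

```python
import uuid


def is_label_uuid(value: object) -> bool:
    """True only for a canonical hyphenated UUID string (8-4-4-4-12)."""
    if not isinstance(value, str) or len(value) != 36:
        return False
    try:
        # uuid.UUID normalizes to lowercase hyphenated form; comparing back
        # rejects strings that parse but are not in canonical layout.
        return str(uuid.UUID(value)) == value.lower()
    except ValueError:
        return False
```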
@@ -688,21 +1860,55 @@ class LinearAdapter(BaseAdapter[Task]):
         """Create a Linear project from an Epic.

         Args:
+        ----
             epic: Epic to create

         Returns:
+        -------
             Created epic with Linear metadata

         """
         team_id = await self._ensure_team_id()

+        # Validate team_id before creating teamIds array
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear project without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         project_input = {
             "name": epic.title,
             "teamIds": [team_id],
         }

         if epic.description:
-
+            # Validate description length (Linear limit: 255 chars for project description)
+            # Matches validation in update_epic() for consistency
+            from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", epic.description, truncate=False
+                )
+                project_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(
+                    f"Epic description validation failed: {e}. "
+                    f"Linear projects have a 255 character limit for descriptions. "
+                    f"Current length: {len(epic.description)} characters."
+                ) from e
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Creating Linear project with input: %s",
+            {
+                "name": epic.title,
+                "teamIds": [team_id],
+                "hasDescription": bool(project_input.get("description")),
+                "leadId": project_input.get("leadId"),
+            },
+        )

         # Create project mutation
         create_query = """
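`FieldValidator.validate_field` (imported above from mcp_ticketer.core.validators) is the package's own rule table; for readers outside the codebase, the check applied here reduces to a length guard against the 255-character project-description limit quoted in the error message. A minimal sketch under that assumption:

```python
LINEAR_EPIC_DESCRIPTION_MAX = 255  # limit cited in the error message above


def validate_epic_description(description: str) -> str:
    """Reject descriptions over the Linear project limit instead of truncating."""
    if len(description) > LINEAR_EPIC_DESCRIPTION_MAX:
        raise ValueError(
            f"Epic description is {len(description)} characters; "
            f"Linear projects allow at most {LINEAR_EPIC_DESCRIPTION_MAX}."
        )
    return description
```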
@@ -753,6 +1959,7 @@ class LinearAdapter(BaseAdapter[Task]):
         """Update a Linear project (Epic) with specified fields.

         Args:
+        ----
             epic_id: Linear project UUID or slug-shortid
             updates: Dictionary of fields to update. Supported fields:
                 - title: Project name
@@ -763,9 +1970,11 @@ class LinearAdapter(BaseAdapter[Task]):
                 - icon: Project icon

         Returns:
+        -------
             Updated Epic object or None if not found

         Raises:
+        ------
             ValueError: If update fails or project not found

         """
@@ -779,13 +1988,29 @@ class LinearAdapter(BaseAdapter[Task]):
         if not project_uuid:
             raise ValueError(f"Project '{epic_id}' not found")

+        # Validate field lengths before building update input
+        from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
         # Build update input from updates dict
         update_input = {}

         if "title" in updates:
-
+            try:
+                validated_title = FieldValidator.validate_field(
+                    "linear", "epic_name", updates["title"], truncate=False
+                )
+                update_input["name"] = validated_title
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
+
         if "description" in updates:
-
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", updates["description"], truncate=False
+                )
+                update_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(str(e)) from e
         if "state" in updates:
             update_input["state"] = updates["state"]
         if "target_date" in updates:
@@ -795,6 +2020,20 @@ class LinearAdapter(BaseAdapter[Task]):
         if "icon" in updates:
             update_input["icon"] = updates["icon"]

+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Updating Linear project %s with input: %s",
+            epic_id,
+            {
+                "name": update_input.get("name"),
+                "hasDescription": bool(update_input.get("description")),
+                "state": update_input.get("state"),
+                "targetDate": update_input.get("targetDate"),
+                "color": update_input.get("color"),
+                "icon": update_input.get("icon"),
+            },
+        )
+
         # ProjectUpdate mutation
         update_query = """
         mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
@@ -840,14 +2079,22 @@ class LinearAdapter(BaseAdapter[Task]):
         except Exception as e:
             raise ValueError(f"Failed to update Linear project: {e}") from e

-    async def read(self, ticket_id: str) -> Task | None:
-        """Read a Linear issue by identifier with full details.
+    async def read(self, ticket_id: str) -> Task | Epic | None:
+        """Read a Linear issue OR project by identifier with full details.

         Args:
-
+        ----
+            ticket_id: Linear issue identifier (e.g., 'BTA-123') or project UUID

         Returns:
-
+        -------
+            Task with full details if issue found,
+            Epic with full details if project found,
+            None if not found
+
+        Raises:
+        ------
+            ValueError: If ticket_id is a view URL (views are not supported in ticket_read)

         """
         # Validate credentials before attempting operation
@@ -855,6 +2102,7 @@ class LinearAdapter(BaseAdapter[Task]):
         if not is_valid:
             raise ValueError(error_message)

+        # Try reading as an issue first (most common case)
         query = (
             ALL_FRAGMENTS
             + """
@@ -872,20 +2120,88 @@ class LinearAdapter(BaseAdapter[Task]):
             if result.get("issue"):
                 return map_linear_issue_to_task(result["issue"])

-        except
-            #
+        except Exception:
+            # Not found as issue, continue to project/view check
+            pass
+
+        # If not found as issue, try reading as project
+        try:
+            project_data = await self.get_project(ticket_id)
+            if project_data:
+                # Fetch project's issues to populate child_issues field
+                issues = await self._get_project_issues(ticket_id)
+
+                # Map to Epic
+                epic = map_linear_project_to_epic(project_data)
+
+                # Populate child_issues with issue IDs
+                epic.child_issues = [issue.id for issue in issues]
+
+                return epic
+        except Exception:
+            # Not found as project either
+            pass
+
+        # If not found as issue or project, check if it's a view URL
+        # Views are collections of issues, not individual tickets
+        logging.debug(
+            f"[VIEW DEBUG] read() checking if ticket_id is a view: {ticket_id}"
+        )
+        try:
+            view_data = await self._get_custom_view(ticket_id)
+            logging.debug(f"[VIEW DEBUG] read() _get_custom_view returned: {view_data}")
+
+            if view_data:
+                logging.debug(
+                    "[VIEW DEBUG] read() view_data is truthy, preparing to raise ValueError"
+                )
+                # View found - raise informative error
+                view_name = view_data.get("name", "Unknown")
+                issues_data = view_data.get("issues", {})
+                issue_count = len(issues_data.get("nodes", []))
+                has_more = issues_data.get("pageInfo", {}).get("hasNextPage", False)
+                count_str = f"{issue_count}+" if has_more else str(issue_count)
+
+                logging.debug(
+                    f"[VIEW DEBUG] read() raising ValueError with view_name={view_name}, count={count_str}"
+                )
+                raise ValueError(
+                    f"Linear view URLs are not supported in ticket_read.\n"
+                    f"\n"
+                    f"View: '{view_name}' ({ticket_id})\n"
+                    f"This view contains {count_str} issues.\n"
+                    f"\n"
+                    f"Use ticket_list or ticket_search to query issues instead."
+                )
+            else:
+                logging.debug("[VIEW DEBUG] read() view_data is falsy (None or empty)")
+        except ValueError:
+            # Re-raise ValueError (our informative error message)
+            logging.debug("[VIEW DEBUG] read() re-raising ValueError")
+            raise
+        except Exception as e:
+            # View query failed - not a view
+            logging.debug(
+                f"[VIEW DEBUG] read() caught exception in view check: {type(e).__name__}: {str(e)}"
+            )
             pass

+        # Not found as either issue, project, or view
+        logging.debug(
+            "[VIEW DEBUG] read() returning None - not found as issue, project, or view"
+        )
         return None

     async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
         """Update a Linear issue with comprehensive field support.

         Args:
+        ----
             ticket_id: Linear issue identifier
             updates: Dictionary of fields to update

         Returns:
+        -------
             Updated task or None if not found

         """
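The new read() resolves an identifier in a fixed order: issue first, then project, then a custom-view check that raises instead of returning. The control flow, compressed; the two `_read_*` names here are hypothetical stand-ins for the inline query logic above:

```python
async def read_any(adapter, ticket_id: str):
    # Order matters: issues are the common case, views must fail loudly.
    for resolver in (adapter._read_issue, adapter._read_project):  # hypothetical names
        try:
            result = await resolver(ticket_id)
            if result is not None:
                return result
        except Exception:
            continue  # fall through to the next resolver
    if await adapter._get_custom_view(ticket_id):
        raise ValueError("Linear view URLs are not supported in ticket_read.")
    return None
```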
@@ -894,6 +2210,9 @@ class LinearAdapter(BaseAdapter[Task]):
         if not is_valid:
             raise ValueError(error_message)

+        # Ensure adapter is initialized (loads workflow states for state transitions)
+        await self.initialize()
+
         # First get the Linear internal ID
         id_query = """
         query GetIssueId($identifier: String!) {
@@ -936,9 +2255,18 @@ class LinearAdapter(BaseAdapter[Task]):
         # Resolve label names to IDs if provided
         if "tags" in updates:
             if updates["tags"]:  # Non-empty list
-
-
-
+                try:
+                    label_ids = await self._resolve_label_ids(updates["tags"])
+                    if label_ids:
+                        update_input["labelIds"] = label_ids
+                except ValueError as e:
+                    # Label creation failed - provide clear error message (1M-396)
+                    raise ValueError(
+                        f"Failed to update labels for issue {ticket_id}. "
+                        f"Label creation error: {e}. "
+                        f"Tip: Use the 'label_list' tool to check existing labels, "
+                        f"or verify you have permissions to create new labels."
+                    ) from e
             else:  # Empty list = remove all labels
                 update_input["labelIds"] = []

@@ -952,6 +2280,21 @@ class LinearAdapter(BaseAdapter[Task]):
                     f"Could not resolve project identifier '{updates['parent_epic']}'"
                 )

+        # Validate labelIds are proper UUIDs before sending to Linear API
+        if "labelIds" in update_input and update_input["labelIds"]:
+            invalid_labels = []
+            for label_id in update_input["labelIds"]:
+                # Linear UUIDs are 36 characters with hyphens (8-4-4-4-12 format)
+                if not isinstance(label_id, str) or len(label_id) != 36:
+                    invalid_labels.append(label_id)
+
+            if invalid_labels:
+                logging.getLogger(__name__).error(
+                    f"Invalid label ID format detected in update: {invalid_labels}. "
+                    f"Labels must be UUIDs (36 chars), not names. Removing labelIds from request."
+                )
+                update_input.pop("labelIds")
+
         # Execute update
         result = await self.client.execute_mutation(
             UPDATE_ISSUE_MUTATION, {"id": linear_id, "input": update_input}
@@ -970,9 +2313,11 @@ class LinearAdapter(BaseAdapter[Task]):
         """Delete a Linear issue (archive it).

         Args:
+        ----
             ticket_id: Linear issue identifier

         Returns:
+        -------
             True if successfully deleted/archived

         """
@@ -984,17 +2329,35 @@ class LinearAdapter(BaseAdapter[Task]):
             return False

     async def list(
-        self,
-
-
+        self,
+        limit: int = 20,
+        offset: int = 0,
+        filters: dict[str, Any] | None = None,
+        compact: bool = False,
+    ) -> dict[str, Any] | builtins.list[Task]:
+        """List Linear issues with optional filtering and compact output.

         Args:
-
+        ----
+            limit: Maximum number of issues to return (default: 20, max: 100)
             offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
             filters: Optional filters (state, assignee, priority, etc.)
+            compact: Return compact format for token efficiency (default: False for backward compatibility)

         Returns:
-
+        -------
+            When compact=True: Dictionary with items and pagination metadata
+            When compact=False: List of Task objects (backward compatible, default)
+
+        Design Decision: Backward Compatible Default (1M-554)
+        ------------------------------------------------------
+        Rationale: Backward compatibility prioritized to avoid breaking existing code.
+        Compact mode available via explicit compact=True for new code.
+
+        Default compact=False maintains existing return type (list[Task]).
+        Users can opt-in to compact mode for 77% token reduction.
+
+        Recommended: Use compact=True for new code to reduce token usage by ~77%.

         """
         # Validate credentials
@@ -1005,6 +2368,17 @@ class LinearAdapter(BaseAdapter[Task]):
         await self.initialize()
         team_id = await self._ensure_team_id()

+        # Validate team_id before filtering
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Enforce maximum limit to prevent excessive responses
+        if limit > 100:
+            limit = 100
+
         # Build issue filter
         issue_filter = build_issue_filter(
             team_id=team_id,
@@ -1022,6 +2396,12 @@ class LinearAdapter(BaseAdapter[Task]):
             if user_id:
                 issue_filter["assignee"] = {"id": {"eq": user_id}}

+        # Support parent_issue filter for listing children (critical for parent state constraints)
+        if "parent_issue" in filters:
+            parent_id = await self._resolve_issue_id(filters["parent_issue"])
+            if parent_id:
+                issue_filter["parent"] = {"id": {"eq": parent_id}}
+
         if "created_after" in filters:
             issue_filter["createdAt"] = {"gte": filters["created_after"]}
         if "updated_after" in filters:
@@ -1038,6 +2418,24 @@ class LinearAdapter(BaseAdapter[Task]):
             for issue in result["issues"]["nodes"]:
                 tasks.append(map_linear_issue_to_task(issue))

+            # Return compact format with pagination metadata
+            if compact:
+                from .mappers import task_to_compact_format
+
+                compact_items = [task_to_compact_format(task) for task in tasks]
+                return {
+                    "status": "success",
+                    "items": compact_items,
+                    "pagination": {
+                        "total_returned": len(compact_items),
+                        "limit": limit,
+                        "offset": offset,
+                        "has_more": len(tasks)
+                        == limit,  # Heuristic: full page likely means more
+                    },
+                }
+
+            # Backward compatible: return list of Task objects
             return tasks

         except Exception as e:
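For callers, the opt-in compact mode changes the return shape from `list[Task]` to a dictionary. The payload built above looks like this (illustrative values):

```python
compact_response = {
    "status": "success",
    "items": [
        # one task_to_compact_format(task) dict per issue
    ],
    "pagination": {
        "total_returned": 20,
        "limit": 20,
        "offset": 0,
        "has_more": True,  # heuristic: a full page usually means more
    },
}
```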
@@ -1047,9 +2445,11 @@ class LinearAdapter(BaseAdapter[Task]):
         """Search Linear issues using comprehensive filters.

         Args:
+        ----
             query: Search query with filters and criteria

         Returns:
+        -------
             List of tasks matching the search criteria

         """
@@ -1061,6 +2461,13 @@ class LinearAdapter(BaseAdapter[Task]):
         await self.initialize()
         team_id = await self._ensure_team_id()

+        # Validate team_id before searching
+        if not team_id:
+            raise ValueError(
+                "Cannot search Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         # Build comprehensive issue filter
         issue_filter = {"team": {"id": {"eq": team_id}}}

@@ -1071,9 +2478,15 @@ class LinearAdapter(BaseAdapter[Task]):
             issue_filter["title"] = {"containsIgnoreCase": query.query}

         # State filter
+        # Bug fix: Handle OPEN state specially to include both unstarted AND backlog
+        # tickets, as both Linear states map to TicketState.OPEN
         if query.state:
-
-
+            if query.state == TicketState.OPEN:
+                # Include both "unstarted" and "backlog" states for OPEN
+                issue_filter["state"] = {"type": {"in": ["unstarted", "backlog"]}}
+            else:
+                state_type = get_linear_state_type(query.state)
+                issue_filter["state"] = {"type": {"eq": state_type}}

         # Priority filter
         if query.priority:
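The OPEN special case above exists because two distinct Linear state types, `unstarted` and `backlog`, both map to `TicketState.OPEN`; filtering on only one of them silently hides backlog tickets. The same branch, extracted as a helper for clarity (a sketch reusing the names from the diff):

```python
def state_filter(state: TicketState) -> dict:
    if state == TicketState.OPEN:
        # Both Linear types collapse onto OPEN, so search must include both.
        return {"type": {"in": ["unstarted", "backlog"]}}
    return {"type": {"eq": get_linear_state_type(state)}}
```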
@@ -1086,6 +2499,13 @@ class LinearAdapter(BaseAdapter[Task]):
             if user_id:
                 issue_filter["assignee"] = {"id": {"eq": user_id}}

+        # Project filter (Bug fix: Add support for filtering by project/epic)
+        if query.project:
+            # Resolve project ID (supports ID, name, or URL)
+            project_id = await self._resolve_project_id(query.project)
+            if project_id:
+                issue_filter["project"] = {"id": {"eq": project_id}}
+
         # Tags filter (labels in Linear)
         if query.tags:
             issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}
@@ -1113,10 +2533,12 @@ class LinearAdapter(BaseAdapter[Task]):
         """Transition Linear issue to new state with workflow validation.

         Args:
+        ----
             ticket_id: Linear issue identifier
             target_state: Target state to transition to

         Returns:
+        -------
             Updated task or None if transition failed

         """
@@ -1132,25 +2554,36 @@ class LinearAdapter(BaseAdapter[Task]):
     ) -> bool:
         """Validate if state transition is allowed.

+        Delegates to BaseAdapter for:
+        - Workflow state machine validation
+        - Parent/child state constraint validation (from 1M-93 requirement)
+
+        The BaseAdapter implementation (core/adapter.py lines 312-370) ensures:
+        1. Valid workflow state transitions (OPEN → IN_PROGRESS → READY → etc.)
+        2. Parent issues maintain completion level ≥ max child completion level
+
         Args:
+        ----
             ticket_id: Linear issue identifier
             target_state: Target state to validate

         Returns:
-
+        -------
+            True if transition is valid, False otherwise

         """
-        #
-
-        return True
+        # Call parent implementation for all validation logic
+        return await super().validate_transition(ticket_id, target_state)

     async def add_comment(self, comment: Comment) -> Comment:
         """Add a comment to a Linear issue.

         Args:
+        ----
             comment: Comment to add

         Returns:
+        -------
             Created comment with ID

         """
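The delegation above leans on BaseAdapter's parent/child constraint: a parent issue may not sit at a lower completion level than its most advanced child. An illustrative reduction of that rule; the level numbers are invented for the sketch, the real ordering lives in core/adapter.py:

```python
COMPLETION_LEVEL = {"open": 0, "in_progress": 1, "ready": 2, "done": 3}  # illustrative


def parent_transition_ok(parent_target: str, child_states: list[str]) -> bool:
    """Parent completion level must stay >= the max child completion level."""
    max_child = max((COMPLETION_LEVEL[s] for s in child_states), default=0)
    return COMPLETION_LEVEL[parent_target] >= max_child
```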
@@ -1218,11 +2651,13 @@ class LinearAdapter(BaseAdapter[Task]):
         """Get comments for a Linear issue.

         Args:
+        ----
             ticket_id: Linear issue identifier
             limit: Maximum number of comments to return
             offset: Number of comments to skip

         Returns:
+        -------
             List of comments for the issue

         """
@@ -1272,16 +2707,30 @@ class LinearAdapter(BaseAdapter[Task]):
         """List all labels available in the Linear team.

         Returns:
+        -------
             List of label dictionaries with 'id', 'name', and 'color' fields

         """
-        #
-
-
+        # Get team ID for label operations
+        team_id = await self._ensure_team_id()
+        # Validate team_id before loading labels
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear labels without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Check cache for labels
+        cache_key = f"linear_labels:{team_id}"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        # Load labels if not cached
+        if cached_labels is None:
             await self._load_team_labels(team_id)
+            cached_labels = await self._labels_cache.get(cache_key)

         # Return cached labels or empty list if not available
-        if not
+        if not cached_labels:
             return []

         # Transform to standardized format
@@ -1291,9 +2740,19 @@ class LinearAdapter(BaseAdapter[Task]):
                 "name": label["name"],
                 "color": label.get("color", ""),
             }
-            for label in
+            for label in cached_labels
         ]

+    async def invalidate_label_cache(self) -> None:
+        """Manually invalidate the label cache.
+
+        Useful when labels are modified externally or after creating new labels.
+        The cache will be automatically refreshed on the next label operation.
+
+        """
+        if self._labels_cache is not None:
+            await self._labels_cache.clear()
+
     async def upload_file(self, file_path: str, mime_type: str | None = None) -> str:
         """Upload a file to Linear's storage and return the asset URL.

@@ -1303,13 +2762,16 @@ class LinearAdapter(BaseAdapter[Task]):
         3. Return the asset URL for use in attachments

         Args:
+        ----
             file_path: Path to the file to upload
             mime_type: MIME type of the file. If None, will be auto-detected.

         Returns:
+        -------
             Asset URL that can be used with attachmentCreate mutation

         Raises:
+        ------
             ValueError: If file doesn't exist, upload fails, or httpx not available
             FileNotFoundError: If the specified file doesn't exist

@@ -1418,6 +2880,7 @@ class LinearAdapter(BaseAdapter[Task]):
         accessible URL.

         Args:
+        ----
             issue_id: Linear issue identifier (e.g., "ENG-842") or UUID
             file_url: URL of the file (from upload_file() or external URL)
             title: Title for the attachment
@@ -1425,9 +2888,11 @@ class LinearAdapter(BaseAdapter[Task]):
             comment_body: Optional comment text to include with the attachment

         Returns:
+        -------
             Dictionary with attachment details including id, title, url, etc.

         Raises:
+        ------
             ValueError: If attachment creation fails or issue not found

         """
@@ -1497,15 +2962,18 @@ class LinearAdapter(BaseAdapter[Task]):
         accessible URL.

         Args:
+        ----
             epic_id: Linear project UUID or slug-shortid
             file_url: URL of the file (from upload_file() or external URL)
             title: Title for the attachment
             subtitle: Optional subtitle for the attachment

         Returns:
+        -------
             Dictionary with attachment details including id, title, url, etc.

         Raises:
+        ------
             ValueError: If attachment creation fails or project not found

         """
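Usage of the new invalidate_label_cache() hook is straightforward: clear the cache after labels change outside the adapter, and let the next label operation refetch. A sketch, assuming an initialized adapter instance:

```python
async def refresh_labels(adapter):
    # Drop the cached team labels so stale entries cannot shadow server state.
    await adapter.invalidate_label_cache()
    # The next label operation repopulates the cache on demand.
    return await adapter.list_labels()
```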
@@ -1561,6 +3029,1129 @@ class LinearAdapter(BaseAdapter[Task]):
                 f"Failed to attach file to project '{epic_id}': {e}"
             ) from e

+    async def get_attachments(self, ticket_id: str) -> builtins.list[Attachment]:
+        """Get all attachments for a Linear issue or project.
+
+        This method retrieves attachment metadata from Linear's GraphQL API.
+        Note that Linear attachment URLs require authentication to access.
+
+        Args:
+        ----
+            ticket_id: Linear issue identifier (e.g., "ENG-842") or project UUID
+
+        Returns:
+        -------
+            List of Attachment objects with metadata
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid
+
+        Authentication Note:
+        -------------------
+        Linear attachment URLs require authentication headers:
+            Authorization: Bearer {api_key}
+
+        URLs are in format: https://files.linear.app/workspace/attachment-id/filename
+        Direct access without authentication will return 401 Unauthorized.
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        # Try as issue first (most common case)
+        issue_uuid = await self._resolve_issue_id(ticket_id)
+
+        if issue_uuid:
+            # Query issue attachments
+            query = """
+            query GetIssueAttachments($issueId: String!) {
+                issue(id: $issueId) {
+                    id
+                    identifier
+                    attachments {
+                        nodes {
+                            id
+                            title
+                            url
+                            subtitle
+                            metadata
+                            createdAt
+                            updatedAt
+                        }
+                    }
+                }
+            }
+            """
+
+            try:
+                result = await self.client.execute_query(query, {"issueId": issue_uuid})
+
+                if not result.get("issue"):
+                    logger.warning(f"Issue {ticket_id} not found")
+                    return []
+
+                attachments_data = (
+                    result["issue"].get("attachments", {}).get("nodes", [])
+                )
+
+                # Map to Attachment objects using identifier (not UUID)
+                return [
+                    map_linear_attachment_to_attachment(att, ticket_id)
+                    for att in attachments_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for issue {ticket_id}: {e}")
+                return []
+
+        # Try as project if not an issue
+        project_uuid = await self._resolve_project_id(ticket_id)
+
+        if project_uuid:
+            # Query project attachments (documents)
+            query = """
+            query GetProjectAttachments($projectId: String!) {
+                project(id: $projectId) {
+                    id
+                    name
+                    documents {
+                        nodes {
+                            id
+                            title
+                            url
+                            createdAt
+                            updatedAt
+                        }
+                    }
+                }
+            }
+            """
+
+            try:
+                result = await self.client.execute_query(
+                    query, {"projectId": project_uuid}
+                )
+
+                if not result.get("project"):
+                    logger.warning(f"Project {ticket_id} not found")
+                    return []
+
+                documents_data = result["project"].get("documents", {}).get("nodes", [])
+
+                # Map documents to Attachment objects
+                return [
+                    map_linear_attachment_to_attachment(doc, ticket_id)
+                    for doc in documents_data
+                ]
+
+            except Exception as e:
+                logger.error(f"Failed to get attachments for project {ticket_id}: {e}")
+                return []
+
+        # Not found as either issue or project
+        logger.warning(f"Ticket {ticket_id} not found as issue or project")
+        return []
+
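As the docstring warns, the URLs returned by get_attachments() are not publicly fetchable; files.linear.app answers 401 without the API key. A download sketch using httpx (already referenced by upload_file()), with `api_key` assumed to be the adapter's Linear key:

```python
import httpx


async def download_attachment(url: str, api_key: str) -> bytes:
    # Linear attachment URLs require the same bearer token as the GraphQL API.
    headers = {"Authorization": f"Bearer {api_key}"}
    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.get(url, headers=headers)
        response.raise_for_status()  # surfaces 401 if the key is missing or wrong
        return response.content
```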
+    async def list_cycles(
+        self, team_id: str | None = None, limit: int = 50
+    ) -> builtins.list[dict[str, Any]]:
+        """List Linear Cycles (Sprints) for the team.
+
+        Args:
+        ----
+            team_id: Linear team UUID. If None, uses the configured team.
+            limit: Maximum number of cycles to return (default: 50)
+
+        Returns:
+        -------
+            List of cycle dictionaries with fields:
+            - id: Cycle UUID
+            - name: Cycle name
+            - number: Cycle number
+            - startsAt: Start date (ISO format)
+            - endsAt: End date (ISO format)
+            - completedAt: Completion date (ISO format, None if not completed)
+            - progress: Progress percentage (0-1)
+
+        Raises:
+        ------
+            ValueError: If credentials are invalid or query fails
+
+        """
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Use configured team if not specified
+        if team_id is None:
+            team_id = await self._ensure_team_id()
+
+        # Validate team_id before listing cycles
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear cycles without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        try:
+            # Fetch all cycles with pagination
+            all_cycles: list[dict[str, Any]] = []
+            has_next_page = True
+            after_cursor = None
+
+            while has_next_page and len(all_cycles) < limit:
+                # Calculate remaining items needed
+                remaining = limit - len(all_cycles)
+                page_size = min(remaining, 50)  # Linear max page size is typically 50
+
+                variables = {"teamId": team_id, "first": page_size}
+                if after_cursor:
+                    variables["after"] = after_cursor
+
+                result = await self.client.execute_query(LIST_CYCLES_QUERY, variables)
+
+                cycles_data = result.get("team", {}).get("cycles", {})
+                page_cycles = cycles_data.get("nodes", [])
+                page_info = cycles_data.get("pageInfo", {})
+
+                all_cycles.extend(page_cycles)
+                has_next_page = page_info.get("hasNextPage", False)
+                after_cursor = page_info.get("endCursor")
+
+            return all_cycles[:limit]  # Ensure we don't exceed limit
+
+        except Exception as e:
+            raise ValueError(f"Failed to list Linear cycles: {e}") from e
+
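list_cycles() follows the cursor-pagination pattern that list_epics() below reuses: request at most 50 items per page, follow `pageInfo.endCursor` while `hasNextPage`, and cap at `limit`. The loop, isolated; `run_query` stands in for `client.execute_query` with LIST_CYCLES_QUERY bound:

```python
from typing import Any


async def paginate_cycles(run_query, team_id: str, limit: int = 50) -> list[dict[str, Any]]:
    items: list[dict[str, Any]] = []
    cursor = None
    while len(items) < limit:
        variables: dict[str, Any] = {"teamId": team_id, "first": min(limit - len(items), 50)}
        if cursor:
            variables["after"] = cursor
        page = (await run_query(variables)).get("team", {}).get("cycles", {})
        items.extend(page.get("nodes", []))
        if not page.get("pageInfo", {}).get("hasNextPage"):
            break  # no further pages
        cursor = page.get("pageInfo", {}).get("endCursor")
    return items[:limit]
```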
3234
|
+
async def get_issue_status(self, issue_id: str) -> dict[str, Any] | None:
|
|
3235
|
+
"""Get rich issue status information for a Linear issue.
|
|
3236
|
+
|
|
3237
|
+
Args:
|
|
3238
|
+
----
|
|
3239
|
+
issue_id: Linear issue identifier (e.g., 'BTA-123') or UUID
|
|
3240
|
+
|
|
3241
|
+
Returns:
|
|
3242
|
+
-------
|
|
3243
|
+
Dictionary with workflow state details:
|
|
3244
|
+
- id: State UUID
|
|
3245
|
+
- name: State name (e.g., "In Progress")
|
|
3246
|
+
- type: State type (e.g., "started", "completed")
|
|
3247
|
+
- color: State color (hex format)
|
|
3248
|
+
- description: State description
|
|
3249
|
+
- position: Position in workflow
|
|
3250
|
+
Returns None if issue not found.
|
|
3251
|
+
|
|
3252
|
+
Raises:
|
|
3253
|
+
------
|
|
3254
|
+
ValueError: If credentials are invalid or query fails
|
|
3255
|
+
|
|
3256
|
+
"""
|
|
3257
|
+
# Validate credentials
|
|
3258
|
+
is_valid, error_message = self.validate_credentials()
|
|
3259
|
+
if not is_valid:
|
|
3260
|
+
raise ValueError(error_message)
|
|
3261
|
+
|
|
3262
|
+
await self.initialize()
|
|
3263
|
+
|
|
3264
|
+
# Resolve issue identifier to UUID if needed
|
|
3265
|
+
issue_uuid = await self._resolve_issue_id(issue_id)
|
|
3266
|
+
if not issue_uuid:
|
|
3267
|
+
return None
|
|
3268
|
+
|
|
3269
|
+
try:
|
|
3270
|
+
result = await self.client.execute_query(
|
|
3271
|
+
GET_ISSUE_STATUS_QUERY, {"issueId": issue_uuid}
|
|
3272
|
+
)
|
|
3273
|
+
|
|
3274
|
+
issue_data = result.get("issue")
|
|
3275
|
+
if not issue_data:
|
|
3276
|
+
return None
|
|
3277
|
+
|
|
3278
|
+
return issue_data.get("state")
|
|
3279
|
+
|
|
3280
|
+
except Exception as e:
|
|
3281
|
+
raise ValueError(f"Failed to get issue status for '{issue_id}': {e}") from e
|
|
3282
|
+
|
|
3283
|
+
async def list_issue_statuses(
|
|
3284
|
+
self, team_id: str | None = None
|
|
3285
|
+
) -> builtins.list[dict[str, Any]]:
|
|
3286
|
+
"""List all workflow states for the team.
|
|
3287
|
+
|
|
3288
|
+
Args:
|
|
3289
|
+
----
|
|
3290
|
+
team_id: Linear team UUID. If None, uses the configured team.
|
|
3291
|
+
|
|
3292
|
+
Returns:
|
|
3293
|
+
-------
|
|
3294
|
+
List of workflow state dictionaries with fields:
|
|
3295
|
+
- id: State UUID
|
|
3296
|
+
- name: State name (e.g., "Backlog", "In Progress", "Done")
|
|
3297
|
+
- type: State type (e.g., "backlog", "unstarted", "started", "completed", "canceled")
|
|
3298
|
+
- color: State color (hex format)
|
|
3299
|
+
- description: State description
|
|
3300
|
+
- position: Position in workflow (lower = earlier)
|
|
3301
|
+
|
|
3302
|
+
Raises:
|
|
3303
|
+
------
|
|
3304
|
+
ValueError: If credentials are invalid or query fails
|
|
3305
|
+
|
|
3306
|
+
"""
|
|
3307
|
+
# Validate credentials
|
|
3308
|
+
is_valid, error_message = self.validate_credentials()
|
|
3309
|
+
if not is_valid:
|
|
3310
|
+
raise ValueError(error_message)
|
|
3311
|
+
|
|
3312
|
+
await self.initialize()
|
|
3313
|
+
|
|
3314
|
+
# Use configured team if not specified
|
|
3315
|
+
if team_id is None:
|
|
3316
|
+
team_id = await self._ensure_team_id()
|
|
3317
|
+
|
|
3318
|
+
# Validate team_id before listing statuses
|
|
3319
|
+
if not team_id:
|
|
3320
|
+
raise ValueError(
|
|
3321
|
+
"Cannot list Linear issue statuses without team_id. "
|
|
3322
|
+
"Ensure LINEAR_TEAM_KEY is configured correctly."
|
|
3323
|
+
)
|
|
3324
|
+
|
|
3325
|
+
try:
|
|
3326
|
+
result = await self.client.execute_query(
|
|
3327
|
+
LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
|
|
3328
|
+
)
|
|
3329
|
+
|
|
3330
|
+
states_data = result.get("team", {}).get("states", {})
|
|
3331
|
+
states = states_data.get("nodes", [])
|
|
3332
|
+
|
|
3333
|
+
# Sort by position to maintain workflow order
|
|
3334
|
+
states.sort(key=lambda s: s.get("position", 0))
|
|
3335
|
+
|
|
3336
|
+
return states
|
|
3337
|
+
|
|
3338
|
+
except Exception as e:
|
|
3339
|
+
raise ValueError(f"Failed to list workflow states: {e}") from e
|
|
3340
|
+
    async def list_epics(
        self,
        limit: int = 20,
        offset: int = 0,
        state: str | None = None,
        include_completed: bool = True,
        compact: bool = False,
        **kwargs: Any,
    ) -> dict[str, Any] | builtins.list[Epic]:
        """List Linear projects (epics) with efficient pagination and compact output.

        Args:
        ----
            limit: Maximum number of projects to return (default: 20, max: 100)
            offset: Number of projects to skip (note: Linear uses cursor-based pagination)
            state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
            include_completed: Whether to include completed projects (default: True)
            compact: Return compact format for token efficiency (default: False for backward compatibility)
            **kwargs: Additional filter parameters (reserved for future use)

        Returns:
        -------
            When compact=True: Dictionary with items and pagination metadata
            When compact=False: List of Epic objects (backward compatible, default)

        Raises:
        ------
            ValueError: If credentials are invalid or query fails

        Design Decision: Backward Compatible with Opt-in Compact Mode (1M-554)
        ----------------------------------------------------------------------
        Rationale: Reduced default limit from 50 to 20 to match list() behavior.
        Compact mode provides ~77% token reduction when explicitly enabled.

        Recommended: Use compact=True for new code to reduce token usage.

        """
        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()
        team_id = await self._ensure_team_id()

        # Validate team_id before listing projects
        if not team_id:
            raise ValueError(
                "Cannot list Linear projects without team_id. "
                "Ensure LINEAR_TEAM_KEY is configured correctly."
            )

        # Enforce maximum limit to prevent excessive responses
        if limit > 100:
            limit = 100

        # Build project filter using existing helper
        from .types import build_project_filter

        project_filter = build_project_filter(
            state=state,
            team_id=team_id,
            include_completed=include_completed,
        )

        try:
            # Fetch projects with pagination
            all_projects = []
            has_next_page = True
            after_cursor = None
            projects_fetched = 0

            while has_next_page and projects_fetched < limit + offset:
                # Calculate how many more we need
                remaining = (limit + offset) - projects_fetched
                page_size = min(remaining, 50)  # Linear max page size is typically 50

                variables = {"filter": project_filter, "first": page_size}
                if after_cursor:
                    variables["after"] = after_cursor

                result = await self.client.execute_query(LIST_PROJECTS_QUERY, variables)

                projects_data = result.get("projects", {})
                page_projects = projects_data.get("nodes", [])
                page_info = projects_data.get("pageInfo", {})

                all_projects.extend(page_projects)
                projects_fetched += len(page_projects)

                has_next_page = page_info.get("hasNextPage", False)
                after_cursor = page_info.get("endCursor")

                # Stop if no more results on this page
                if not page_projects:
                    break

            # Apply offset and limit
            paginated_projects = all_projects[offset : offset + limit]

            # Map Linear projects to Epic objects using existing mapper
            epics = []
            for project in paginated_projects:
                epics.append(map_linear_project_to_epic(project))

            # Return compact format with pagination metadata
            if compact:
                from .mappers import epic_to_compact_format

                compact_items = [epic_to_compact_format(epic) for epic in epics]
                return {
                    "status": "success",
                    "items": compact_items,
                    "pagination": {
                        "total_returned": len(compact_items),
                        "limit": limit,
                        "offset": offset,
                        "has_more": has_next_page,  # Use actual Linear pagination status
                    },
                }

            # Backward compatible: return list of Epic objects
            return epics

        except Exception as e:
            raise ValueError(f"Failed to list Linear projects: {e}") from e

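A hedged paging sketch for list_epics: with compact=True the return value is a dict whose has_more flag reflects Linear's own pageInfo, so a follow-up call with a shifted offset fetches the next window. `adapter` is an assumed, configured instance, and the call runs inside an async context.

page = await adapter.list_epics(limit=20, compact=True)
for item in page["items"]:
    ...  # compact epic dicts
if page["pagination"]["has_more"]:
    page2 = await adapter.list_epics(limit=20, offset=20, compact=True)
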
    def _linear_update_to_model(self, linear_data: dict[str, Any]) -> ProjectUpdate:
        """Convert Linear GraphQL response to ProjectUpdate model (1M-238).

        Maps Linear's ProjectUpdate entity fields to the universal ProjectUpdate model,
        handling health value transformations and optional fields.

        Args:
        ----
            linear_data: GraphQL response data for a ProjectUpdate entity

        Returns:
        -------
            ProjectUpdate instance with mapped fields

        Linear Health Mapping:
        ---------------------
        Linear uses camelCase enum values: onTrack, atRisk, offTrack
        Universal model uses snake_case: ON_TRACK, AT_RISK, OFF_TRACK

        """
        # Map Linear health values (camelCase) to universal enum (UPPER_SNAKE_CASE)
        health_mapping = {
            "onTrack": ProjectUpdateHealth.ON_TRACK,
            "atRisk": ProjectUpdateHealth.AT_RISK,
            "offTrack": ProjectUpdateHealth.OFF_TRACK,
        }

        health_value = linear_data.get("health")
        health = health_mapping.get(health_value) if health_value else None

        # Extract user info
        user_data = linear_data.get("user", {})
        author_id = user_data.get("id") if user_data else None
        author_name = user_data.get("name") if user_data else None

        # Extract project info
        project_data = linear_data.get("project", {})
        project_id = project_data.get("id", "")
        project_name = project_data.get("name")

        # Parse timestamps
        created_at = datetime.fromisoformat(
            linear_data["createdAt"].replace("Z", "+00:00")
        )
        updated_at = None
        if linear_data.get("updatedAt"):
            updated_at = datetime.fromisoformat(
                linear_data["updatedAt"].replace("Z", "+00:00")
            )

        return ProjectUpdate(
            id=linear_data["id"],
            project_id=project_id,
            project_name=project_name,
            body=linear_data["body"],
            health=health,
            created_at=created_at,
            updated_at=updated_at,
            author_id=author_id,
            author_name=author_name,
            url=linear_data.get("url"),
            diff_markdown=linear_data.get("diffMarkdown"),
        )

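A worked input/output sketch for the mapper above; the payload values are invented but follow the fields the method actually reads, and `adapter` is an assumed instance.

payload = {
    "id": "upd_1",
    "body": "Weekly status",
    "health": "atRisk",                       # camelCase value from Linear
    "createdAt": "2025-05-01T12:00:00.000Z",  # "Z" is normalized to +00:00
    "user": {"id": "usr_1", "name": "Ada"},
    "project": {"id": "prj_1", "name": "Apollo"},
}
update = adapter._linear_update_to_model(payload)
assert update.health is ProjectUpdateHealth.AT_RISK
assert update.created_at.tzinfo is not None  # timezone-aware after parsing
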
    async def create_project_update(
        self,
        project_id: str,
        body: str,
        health: ProjectUpdateHealth | None = None,
    ) -> ProjectUpdate:
        """Create a project status update in Linear (1M-238).

        Creates a new status update for a Linear project with optional health indicator.
        Linear will automatically generate a diff showing changes since the last update.

        Args:
        ----
            project_id: Linear project UUID, slugId, or short ID
            body: Markdown-formatted update content (required)
            health: Optional health status (ON_TRACK, AT_RISK, OFF_TRACK)

        Returns:
        -------
            Created ProjectUpdate with Linear metadata including auto-generated diff

        Raises:
        ------
            ValueError: If credentials invalid, project not found, or creation fails

        Example:
        -------
        >>> update = await adapter.create_project_update(
        ...     project_id="PROJ-123",
        ...     body="Sprint 23 completed. 15/20 stories done.",
        ...     health=ProjectUpdateHealth.AT_RISK
        ... )

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        # Resolve project identifier to UUID if needed
        project_uuid = await self._resolve_project_id(project_id)
        if not project_uuid:
            raise ValueError(f"Project '{project_id}' not found")

        # Build mutation variables
        variables: dict[str, Any] = {
            "projectId": project_uuid,
            "body": body,
        }

        # Map health enum to Linear's camelCase format
        if health:
            health_mapping = {
                ProjectUpdateHealth.ON_TRACK: "onTrack",
                ProjectUpdateHealth.AT_RISK: "atRisk",
                ProjectUpdateHealth.OFF_TRACK: "offTrack",
            }
            variables["health"] = health_mapping.get(health)

        try:
            result = await self.client.execute_mutation(
                CREATE_PROJECT_UPDATE_MUTATION, variables
            )

            if not result["projectUpdateCreate"]["success"]:
                raise ValueError(f"Failed to create project update for '{project_id}'")

            update_data = result["projectUpdateCreate"]["projectUpdate"]
            logger.info(
                f"Created project update for project '{project_id}' (UUID: {project_uuid})"
            )

            return self._linear_update_to_model(update_data)

        except Exception as e:
            raise ValueError(
                f"Failed to create project update for '{project_id}': {e}"
            ) from e

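For the docstring example above, the mutation variables assembled by this method would look roughly like the following sketch; the resolved UUID is a placeholder, not a real value.

variables = {
    "projectId": "<resolved-project-uuid>",  # from _resolve_project_id("PROJ-123")
    "body": "Sprint 23 completed. 15/20 stories done.",
    "health": "atRisk",  # ProjectUpdateHealth.AT_RISK mapped to Linear's camelCase
}
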
    async def list_project_updates(
        self,
        project_id: str,
        limit: int = 10,
    ) -> list[ProjectUpdate]:
        """List project updates for a project (1M-238).

        Retrieves recent status updates for a Linear project, ordered by creation date.

        Args:
        ----
            project_id: Linear project UUID, slugId, or short ID
            limit: Maximum number of updates to return (default: 10, max: 250)

        Returns:
        -------
            List of ProjectUpdate objects ordered by creation date (newest first)

        Raises:
        ------
            ValueError: If credentials invalid or query fails

        Example:
        -------
        >>> updates = await adapter.list_project_updates("PROJ-123", limit=5)
        >>> for update in updates:
        ...     print(f"{update.created_at}: {update.health} - {update.body[:50]}")

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        # Resolve project identifier to UUID if needed
        project_uuid = await self._resolve_project_id(project_id)
        if not project_uuid:
            raise ValueError(f"Project '{project_id}' not found")

        try:
            result = await self.client.execute_query(
                LIST_PROJECT_UPDATES_QUERY,
                {"projectId": project_uuid, "first": min(limit, 250)},
            )

            project_data = result.get("project")
            if not project_data:
                raise ValueError(f"Project '{project_id}' not found")

            updates_data = project_data.get("projectUpdates", {}).get("nodes", [])

            # Map Linear updates to ProjectUpdate models
            return [self._linear_update_to_model(update) for update in updates_data]

        except Exception as e:
            logger.warning(f"Failed to list project updates for {project_id}: {e}")
            raise ValueError(
                f"Failed to list project updates for '{project_id}': {e}"
            ) from e

    async def get_project_update(
        self,
        update_id: str,
    ) -> ProjectUpdate:
        """Get a specific project update by ID (1M-238).

        Retrieves detailed information about a single project status update.

        Args:
        ----
            update_id: Linear ProjectUpdate UUID

        Returns:
        -------
            ProjectUpdate object with full details

        Raises:
        ------
            ValueError: If credentials invalid, update not found, or query fails

        Example:
        -------
        >>> update = await adapter.get_project_update("update-uuid-here")
        >>> print(f"Update: {update.body}")
        >>> print(f"Health: {update.health}")
        >>> print(f"Diff: {update.diff_markdown}")

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        try:
            result = await self.client.execute_query(
                GET_PROJECT_UPDATE_QUERY, {"id": update_id}
            )

            update_data = result.get("projectUpdate")
            if not update_data:
                raise ValueError(f"Project update '{update_id}' not found")

            return self._linear_update_to_model(update_data)

        except Exception as e:
            logger.error(f"Failed to get project update {update_id}: {e}")
            raise ValueError(f"Failed to get project update '{update_id}': {e}") from e

    # Milestone Operations (1M-607 Phase 2: Linear Adapter Integration)

    async def milestone_create(
        self,
        name: str,
        target_date: datetime | None = None,
        labels: list[str] | None = None,
        description: str = "",
        project_id: str | None = None,
    ) -> Milestone:
        """Create a milestone using Linear Cycles.

        Linear Cycles require start and end dates. If target_date is provided,
        startsAt is set to today and endsAt to target_date; with no target_date,
        the cycle defaults to a 2-week window.

        Args:
        ----
            name: Milestone name
            target_date: Target completion date (optional)
            labels: Labels for milestone grouping (optional, stored in metadata)
            description: Milestone description
            project_id: Associated project ID (optional)

        Returns:
        -------
            Created Milestone object

        Raises:
        ------
            ValueError: If credentials invalid or creation fails

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()
        team_id = await self._ensure_team_id()

        # Linear requires both start and end dates for cycles
        from datetime import timedelta, timezone

        starts_at = datetime.now(timezone.utc)
        if target_date:
            ends_at = target_date
            # Ensure ends_at has timezone info
            if ends_at.tzinfo is None:
                ends_at = ends_at.replace(tzinfo=timezone.utc)
        else:
            # Default to 2 weeks from now
            ends_at = starts_at + timedelta(days=14)

        try:
            result = await self.client.execute_query(
                CREATE_CYCLE_MUTATION,
                {
                    "input": {
                        "name": name,
                        "description": description,
                        "startsAt": starts_at.isoformat(),
                        "endsAt": ends_at.isoformat(),
                        "teamId": team_id,
                    }
                },
            )

            if not result.get("cycleCreate", {}).get("success"):
                raise ValueError("Failed to create cycle")

            cycle_data = result["cycleCreate"]["cycle"]
            logger.info(
                f"Created Linear cycle {cycle_data['id']} for milestone '{name}'"
            )

            # Convert Linear Cycle to Milestone model
            return self._cycle_to_milestone(cycle_data, labels)

        except Exception as e:
            logger.error(f"Failed to create milestone '{name}': {e}")
            raise ValueError(f"Failed to create milestone: {e}") from e

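A sketch of the date defaulting described in the docstring (assumed `adapter`, async context): omitting target_date yields a two-week cycle, and a naive datetime is coerced to UTC before the mutation is sent.

from datetime import datetime

m1 = await adapter.milestone_create(name="Sprint 24")  # spans now .. now + 14 days
m2 = await adapter.milestone_create(
    name="Beta freeze",
    target_date=datetime(2025, 7, 1),  # naive -> 2025-07-01T00:00:00+00:00
)
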
    async def milestone_get(self, milestone_id: str) -> Milestone | None:
        """Get milestone by ID with progress calculation.

        Args:
        ----
            milestone_id: Milestone/Cycle identifier

        Returns:
        -------
            Milestone object with calculated progress, None if not found

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        try:
            result = await self.client.execute_query(
                GET_CYCLE_QUERY, {"id": milestone_id}
            )

            cycle_data = result.get("cycle")
            if not cycle_data:
                logger.debug(f"Cycle {milestone_id} not found")
                return None

            return self._cycle_to_milestone(cycle_data)

        except Exception as e:
            logger.warning(f"Failed to get milestone {milestone_id}: {e}")
            return None

    async def milestone_list(
        self,
        project_id: str | None = None,
        state: str | None = None,
    ) -> list[Milestone]:
        """List milestones using Linear Cycles.

        Args:
        ----
            project_id: Filter by project (not used by Linear Cycles)
            state: Filter by state (open, active, completed, closed)

        Returns:
        -------
            List of Milestone objects

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()
        team_id = await self._ensure_team_id()

        try:
            result = await self.client.execute_query(
                LIST_CYCLES_QUERY,
                {"teamId": team_id, "first": 50, "after": None},
            )

            cycles = result.get("team", {}).get("cycles", {}).get("nodes", [])
            milestones = [self._cycle_to_milestone(cycle) for cycle in cycles]

            # Apply state filter if provided
            if state:
                milestones = [m for m in milestones if m.state == state]

            logger.debug(f"Listed {len(milestones)} milestones (state={state})")
            return milestones

        except Exception as e:
            logger.error(f"Failed to list milestones: {e}")
            return []

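Because the state filter above is applied client-side after mapping, filtering costs one list pass over the derived milestone states. An illustrative call, assuming a configured `adapter` in an async context:

active = await adapter.milestone_list(state="active")
overdue = await adapter.milestone_list(state="closed")  # past end date, never completed
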
    async def milestone_update(
        self,
        milestone_id: str,
        name: str | None = None,
        target_date: datetime | None = None,
        state: str | None = None,
        labels: list[str] | None = None,
        description: str | None = None,
    ) -> Milestone | None:
        """Update milestone properties.

        Args:
        ----
            milestone_id: Milestone identifier
            name: New name (optional)
            target_date: New target date (optional)
            state: New state (optional)
            labels: New labels (optional, stored in metadata)
            description: New description (optional)

        Returns:
        -------
            Updated Milestone object, None if not found

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        # Build update input
        update_input = {}
        if name:
            update_input["name"] = name
        if description is not None:
            update_input["description"] = description
        if target_date:
            from datetime import timezone

            # Ensure target_date has timezone
            if target_date.tzinfo is None:
                target_date = target_date.replace(tzinfo=timezone.utc)
            update_input["endsAt"] = target_date.isoformat()
        if state == "completed":
            # Mark cycle as completed
            from datetime import datetime, timezone

            update_input["completedAt"] = datetime.now(timezone.utc).isoformat()

        if not update_input:
            # No updates provided, just return current milestone
            return await self.milestone_get(milestone_id)

        try:
            result = await self.client.execute_query(
                UPDATE_CYCLE_MUTATION,
                {"id": milestone_id, "input": update_input},
            )

            if not result.get("cycleUpdate", {}).get("success"):
                logger.warning(f"Failed to update cycle {milestone_id}")
                return None

            cycle_data = result["cycleUpdate"]["cycle"]
            logger.info(f"Updated Linear cycle {milestone_id}")

            return self._cycle_to_milestone(cycle_data, labels)

        except Exception as e:
            logger.error(f"Failed to update milestone {milestone_id}: {e}")
            return None

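Two boundary cases of the update-input construction above, as a hedged sketch (assumed `adapter`, async context): state="completed" sends only a completedAt field, while an argument-free call skips the mutation entirely.

done = await adapter.milestone_update("cycle-uuid", state="completed")  # completedAt = now (UTC)
same = await adapter.milestone_update("cycle-uuid")  # empty input -> falls through to milestone_get
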
    async def milestone_delete(self, milestone_id: str) -> bool:
        """Delete (archive) a milestone.

        Linear doesn't support permanent cycle deletion, so this archives the cycle.

        Args:
        ----
            milestone_id: Milestone identifier

        Returns:
        -------
            True if archived successfully, False otherwise

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        try:
            result = await self.client.execute_query(
                ARCHIVE_CYCLE_MUTATION, {"id": milestone_id}
            )

            success = result.get("cycleArchive", {}).get("success", False)
            if success:
                logger.info(f"Archived Linear cycle {milestone_id}")
            else:
                logger.warning(f"Failed to archive cycle {milestone_id}")

            return success

        except Exception as e:
            logger.error(f"Failed to delete milestone {milestone_id}: {e}")
            return False

    async def milestone_get_issues(
        self,
        milestone_id: str,
        state: str | None = None,
    ) -> list[Task]:
        """Get issues associated with a milestone (cycle).

        Args:
        ----
            milestone_id: Milestone identifier
            state: Filter by issue state (optional)

        Returns:
        -------
            List of Task objects in the milestone

        """
        logger = logging.getLogger(__name__)

        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        await self.initialize()

        try:
            result = await self.client.execute_query(
                GET_CYCLE_ISSUES_QUERY, {"cycleId": milestone_id, "first": 100}
            )

            cycle_data = result.get("cycle")
            if not cycle_data:
                logger.warning(f"Cycle {milestone_id} not found")
                return []

            issues = cycle_data.get("issues", {}).get("nodes", [])

            # Convert Linear issues to Task objects
            tasks = [map_linear_issue_to_task(issue) for issue in issues]

            # Filter by state if provided (state is already truthy in this branch)
            if state:
                state_filter = TicketState(state)
                tasks = [t for t in tasks if t.state == state_filter]

            logger.debug(f"Retrieved {len(tasks)} issues from milestone {milestone_id}")
            return tasks

        except Exception as e:
            logger.error(f"Failed to get milestone issues {milestone_id}: {e}")
            return []

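An illustrative call for the method above (assumed `adapter`, async context; the state string must be a valid TicketState value or the filter raises):

open_tasks = await adapter.milestone_get_issues("cycle-uuid", state="open")
print(len(open_tasks), "open issues in cycle")
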
    def _cycle_to_milestone(
        self,
        cycle_data: dict[str, Any],
        labels: list[str] | None = None,
    ) -> Milestone:
        """Convert Linear Cycle to universal Milestone model.

        Determines state based on dates:
        - completed: Has completedAt timestamp
        - closed: Past end date without completion
        - active: Current date between start and end
        - open: Before start date

        Args:
        ----
            cycle_data: Linear Cycle data from GraphQL
            labels: Optional labels to associate with milestone

        Returns:
        -------
            Milestone object

        """
        from datetime import datetime, timezone

        # Determine state from dates
        now = datetime.now(timezone.utc)

        # Parse dates
        starts_at_str = cycle_data.get("startsAt")
        ends_at_str = cycle_data.get("endsAt")
        completed_at_str = cycle_data.get("completedAt")

        starts_at = (
            datetime.fromisoformat(starts_at_str.replace("Z", "+00:00"))
            if starts_at_str
            else None
        )
        ends_at = (
            datetime.fromisoformat(ends_at_str.replace("Z", "+00:00"))
            if ends_at_str
            else None
        )
        completed_at = (
            datetime.fromisoformat(completed_at_str.replace("Z", "+00:00"))
            if completed_at_str
            else None
        )

        # Determine state
        if completed_at:
            state = "completed"
        elif ends_at and now > ends_at:
            state = "closed"  # Past due without completion
        elif starts_at and ends_at and starts_at <= now <= ends_at:
            state = "active"
        else:
            state = "open"  # Before start date

        # Parse progress (Linear uses 0.0-1.0, we use 0-100)
        progress = cycle_data.get("progress", 0.0)
        progress_pct = progress * 100.0

        return Milestone(
            id=cycle_data["id"],
            name=cycle_data["name"],
            description=cycle_data.get("description", ""),
            target_date=ends_at,
            state=state,
            labels=labels or [],
            total_issues=cycle_data.get("issueCount", 0),
            closed_issues=cycle_data.get("completedIssueCount", 0),
            progress_pct=progress_pct,
            created_at=None,  # Linear doesn't provide creation timestamp for cycles
            updated_at=None,
            platform_data={
                "linear": {
                    "cycle_id": cycle_data["id"],
                    "starts_at": starts_at_str,
                    "ends_at": ends_at_str,
                    "completed_at": completed_at_str,
                    "team": cycle_data.get("team"),
                }
            },
        )

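A worked example of the date-based state logic and progress conversion above; the timestamps are invented, and the asserted state assumes the current date falls between them.

m = adapter._cycle_to_milestone(
    {
        "id": "cyc_1",
        "name": "Sprint 24",
        "startsAt": "2025-01-01T00:00:00.000Z",
        "endsAt": "2025-12-31T00:00:00.000Z",
        "progress": 0.4,  # surfaces as progress_pct == 40.0
    }
)
# No completedAt, and now within [startsAt, endsAt] -> m.state == "active"
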
    async def close(self) -> None:
        """Close the adapter and clean up resources."""
        await self.client.close()