mcp_ticketer-0.1.1-py3-none-any.whl
- mcp_ticketer/__init__.py +27 -0
- mcp_ticketer/__version__.py +40 -0
- mcp_ticketer/adapters/__init__.py +8 -0
- mcp_ticketer/adapters/aitrackdown.py +396 -0
- mcp_ticketer/adapters/github.py +974 -0
- mcp_ticketer/adapters/jira.py +831 -0
- mcp_ticketer/adapters/linear.py +1355 -0
- mcp_ticketer/cache/__init__.py +5 -0
- mcp_ticketer/cache/memory.py +193 -0
- mcp_ticketer/cli/__init__.py +5 -0
- mcp_ticketer/cli/main.py +812 -0
- mcp_ticketer/cli/queue_commands.py +285 -0
- mcp_ticketer/cli/utils.py +523 -0
- mcp_ticketer/core/__init__.py +15 -0
- mcp_ticketer/core/adapter.py +211 -0
- mcp_ticketer/core/config.py +403 -0
- mcp_ticketer/core/http_client.py +430 -0
- mcp_ticketer/core/mappers.py +492 -0
- mcp_ticketer/core/models.py +111 -0
- mcp_ticketer/core/registry.py +128 -0
- mcp_ticketer/mcp/__init__.py +5 -0
- mcp_ticketer/mcp/server.py +459 -0
- mcp_ticketer/py.typed +0 -0
- mcp_ticketer/queue/__init__.py +7 -0
- mcp_ticketer/queue/__main__.py +6 -0
- mcp_ticketer/queue/manager.py +261 -0
- mcp_ticketer/queue/queue.py +357 -0
- mcp_ticketer/queue/run_worker.py +38 -0
- mcp_ticketer/queue/worker.py +425 -0
- mcp_ticketer-0.1.1.dist-info/METADATA +362 -0
- mcp_ticketer-0.1.1.dist-info/RECORD +35 -0
- mcp_ticketer-0.1.1.dist-info/WHEEL +5 -0
- mcp_ticketer-0.1.1.dist-info/entry_points.txt +3 -0
- mcp_ticketer-0.1.1.dist-info/licenses/LICENSE +21 -0
- mcp_ticketer-0.1.1.dist-info/top_level.txt +1 -0
mcp_ticketer/adapters/linear.py
@@ -0,0 +1,1355 @@
"""Linear adapter implementation using native GraphQL API with full feature support."""

import os
import asyncio
from typing import List, Optional, Dict, Any, Union
from datetime import datetime, date
from enum import Enum

from gql import gql, Client
from gql.transport.httpx import HTTPXAsyncTransport
from gql.transport.exceptions import TransportQueryError
import httpx

from ..core.adapter import BaseAdapter
from ..core.models import Epic, Task, Comment, SearchQuery, TicketState, Priority
from ..core.registry import AdapterRegistry


class LinearStateType(str, Enum):
    """Linear workflow state types."""
    BACKLOG = "backlog"
    UNSTARTED = "unstarted"
    STARTED = "started"
    COMPLETED = "completed"
    CANCELED = "canceled"


class LinearPriorityMapping:
    """Maps between Linear priority numbers and our Priority enum."""

    TO_LINEAR = {
        Priority.LOW: 4,
        Priority.MEDIUM: 3,
        Priority.HIGH: 2,
        Priority.CRITICAL: 1,
    }

    FROM_LINEAR = {
        0: Priority.CRITICAL,  # Urgent
        1: Priority.CRITICAL,  # High
        2: Priority.HIGH,      # Medium
        3: Priority.MEDIUM,    # Low
        4: Priority.LOW,       # No priority
    }


# GraphQL Fragments for reusable field definitions
USER_FRAGMENT = """
fragment UserFields on User {
    id
    name
    email
    displayName
    avatarUrl
    isMe
}
"""

WORKFLOW_STATE_FRAGMENT = """
fragment WorkflowStateFields on WorkflowState {
    id
    name
    type
    position
    color
}
"""

TEAM_FRAGMENT = """
fragment TeamFields on Team {
    id
    name
    key
    description
}
"""

CYCLE_FRAGMENT = """
fragment CycleFields on Cycle {
    id
    number
    name
    description
    startsAt
    endsAt
    completedAt
}
"""

PROJECT_FRAGMENT = """
fragment ProjectFields on Project {
    id
    name
    description
    state
    createdAt
    updatedAt
    url
    icon
    color
    targetDate
    startedAt
    completedAt
    teams {
        nodes {
            ...TeamFields
        }
    }
}
"""

LABEL_FRAGMENT = """
fragment LabelFields on IssueLabel {
    id
    name
    color
    description
}
"""

ATTACHMENT_FRAGMENT = """
fragment AttachmentFields on Attachment {
    id
    url
    title
    subtitle
    metadata
    source
    sourceType
    createdAt
}
"""

COMMENT_FRAGMENT = """
fragment CommentFields on Comment {
    id
    body
    createdAt
    updatedAt
    user {
        ...UserFields
    }
    parent {
        id
    }
}
"""

ISSUE_COMPACT_FRAGMENT = """
fragment IssueCompactFields on Issue {
    id
    identifier
    title
    description
    priority
    priorityLabel
    estimate
    dueDate
    slaBreachesAt
    slaStartedAt
    createdAt
    updatedAt
    archivedAt
    canceledAt
    completedAt
    startedAt
    startedTriageAt
    triagedAt
    url
    branchName
    customerTicketCount

    state {
        ...WorkflowStateFields
    }
    assignee {
        ...UserFields
    }
    creator {
        ...UserFields
    }
    labels {
        nodes {
            ...LabelFields
        }
    }
    team {
        ...TeamFields
    }
    cycle {
        ...CycleFields
    }
    project {
        ...ProjectFields
    }
    parent {
        id
        identifier
        title
    }
    children {
        nodes {
            id
            identifier
            title
        }
    }
    attachments {
        nodes {
            ...AttachmentFields
        }
    }
}
"""

ISSUE_FULL_FRAGMENT = """
fragment IssueFullFields on Issue {
    ...IssueCompactFields
    comments {
        nodes {
            ...CommentFields
        }
    }
    subscribers {
        nodes {
            ...UserFields
        }
    }
    relations {
        nodes {
            id
            type
            relatedIssue {
                id
                identifier
                title
            }
        }
    }
}
"""

# Combine all fragments
ALL_FRAGMENTS = (
    USER_FRAGMENT +
    WORKFLOW_STATE_FRAGMENT +
    TEAM_FRAGMENT +
    CYCLE_FRAGMENT +
    PROJECT_FRAGMENT +
    LABEL_FRAGMENT +
    ATTACHMENT_FRAGMENT +
    COMMENT_FRAGMENT +
    ISSUE_COMPACT_FRAGMENT +
    ISSUE_FULL_FRAGMENT
)

# Fragments needed for issue list/search (without comments)
ISSUE_LIST_FRAGMENTS = (
    USER_FRAGMENT +
    WORKFLOW_STATE_FRAGMENT +
    TEAM_FRAGMENT +
    CYCLE_FRAGMENT +
    PROJECT_FRAGMENT +
    LABEL_FRAGMENT +
    ATTACHMENT_FRAGMENT +
    ISSUE_COMPACT_FRAGMENT
)


class LinearAdapter(BaseAdapter[Task]):
    """Adapter for Linear issue tracking system using native GraphQL API."""

    def __init__(self, config: Dict[str, Any]):
        """Initialize Linear adapter.

        Args:
            config: Configuration with:
                - api_key: Linear API key (or LINEAR_API_KEY env var)
                - workspace: Linear workspace name (optional, for documentation)
                - team_key: Linear team key (required, e.g., 'BTA')
                - api_url: Optional Linear API URL
        """
        super().__init__(config)

        # Get API key from config or environment
        self.api_key = config.get("api_key") or os.getenv("LINEAR_API_KEY")
        if not self.api_key:
            raise ValueError("Linear API key required (config.api_key or LINEAR_API_KEY env var)")

        self.workspace = config.get("workspace")  # Optional, for documentation
        self.team_key = config.get("team_key")
        if not self.team_key:
            raise ValueError("Linear team_key is required in configuration")
        self.api_url = config.get("api_url", "https://api.linear.app/graphql")

        # Setup GraphQL client with authentication
        transport = HTTPXAsyncTransport(
            url=self.api_url,
            headers={"Authorization": self.api_key},
            timeout=30.0,
        )
        self.client = Client(transport=transport, fetch_schema_from_transport=False)

        # Caches for frequently used data
        self._team_id: Optional[str] = None
        self._workflow_states: Optional[Dict[str, Dict[str, Any]]] = None
        self._labels: Optional[Dict[str, str]] = None  # name -> id
        self._users: Optional[Dict[str, str]] = None  # email -> id

        # Initialize state mapping
        self._state_mapping = self._get_state_mapping()

        # Initialization lock to prevent concurrent initialization
        self._init_lock = asyncio.Lock()
        self._initialized = False

    async def initialize(self) -> None:
        """Initialize adapter by preloading team, states, and labels data concurrently."""
        if self._initialized:
            return

        async with self._init_lock:
            if self._initialized:
                return

            try:
                # First get team ID as it's required for other queries
                team_id = await self._fetch_team_data()

                # Then fetch states and labels concurrently
                states_task = self._fetch_workflow_states_data(team_id)
                labels_task = self._fetch_labels_data(team_id)

                workflow_states, labels = await asyncio.gather(states_task, labels_task)

                # Cache the results
                self._team_id = team_id
                self._workflow_states = workflow_states
                self._labels = labels
                self._initialized = True

            except Exception as e:
                # Reset on error
                self._team_id = None
                self._workflow_states = None
                self._labels = None
                raise e

    async def _fetch_team_data(self) -> str:
        """Fetch team ID."""
        query = gql("""
            query GetTeam($key: String!) {
                teams(filter: { key: { eq: $key } }) {
                    nodes {
                        id
                        name
                        key
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(query, variable_values={"key": self.team_key})

        if not result["teams"]["nodes"]:
            raise ValueError(f"Team with key '{self.team_key}' not found")

        return result["teams"]["nodes"][0]["id"]

    async def _fetch_workflow_states_data(self, team_id: str) -> Dict[str, Dict[str, Any]]:
        """Fetch workflow states data."""
        query = gql("""
            query WorkflowStates($teamId: ID!) {
                workflowStates(filter: { team: { id: { eq: $teamId } } }) {
                    nodes {
                        id
                        name
                        type
                        position
                        color
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(query, variable_values={"teamId": team_id})

        workflow_states = {}
        for state in result["workflowStates"]["nodes"]:
            state_type = state["type"].lower()
            if state_type not in workflow_states:
                workflow_states[state_type] = state
            elif state["position"] < workflow_states[state_type]["position"]:
                workflow_states[state_type] = state

        return workflow_states

    async def _fetch_labels_data(self, team_id: str) -> Dict[str, str]:
        """Fetch labels data."""
        query = gql("""
            query GetLabels($teamId: ID!) {
                issueLabels(filter: { team: { id: { eq: $teamId } } }) {
                    nodes {
                        id
                        name
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(query, variable_values={"teamId": team_id})

        return {label["name"]: label["id"] for label in result["issueLabels"]["nodes"]}

    async def _ensure_initialized(self) -> None:
        """Ensure adapter is initialized before operations."""
        if not self._initialized:
            await self.initialize()

    async def _ensure_team_id(self) -> str:
        """Get and cache the team ID."""
        await self._ensure_initialized()
        return self._team_id

    async def _get_workflow_states(self) -> Dict[str, Dict[str, Any]]:
        """Get cached workflow states from Linear."""
        await self._ensure_initialized()
        return self._workflow_states

    async def _get_or_create_label(self, name: str, color: Optional[str] = None) -> str:
        """Get existing label ID or create new label."""
        await self._ensure_initialized()

        # Check cache
        if name in self._labels:
            return self._labels[name]

        # Try to find existing label (may have been added since initialization)
        team_id = self._team_id
        search_query = gql("""
            query GetLabel($name: String!, $teamId: ID!) {
                issueLabels(filter: { name: { eq: $name }, team: { id: { eq: $teamId } } }) {
                    nodes {
                        id
                        name
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                search_query,
                variable_values={"name": name, "teamId": team_id}
            )

        if result["issueLabels"]["nodes"]:
            label_id = result["issueLabels"]["nodes"][0]["id"]
            self._labels[name] = label_id
            return label_id

        # Create new label
        create_query = gql("""
            mutation CreateLabel($input: IssueLabelCreateInput!) {
                issueLabelCreate(input: $input) {
                    issueLabel {
                        id
                        name
                    }
                }
            }
        """)

        label_input = {
            "name": name,
            "teamId": team_id,
        }
        if color:
            label_input["color"] = color

        async with self.client as session:
            result = await session.execute(
                create_query,
                variable_values={"input": label_input}
            )

        label_id = result["issueLabelCreate"]["issueLabel"]["id"]
        self._labels[name] = label_id
        return label_id

    async def _get_user_id(self, email: str) -> Optional[str]:
        """Get user ID by email."""
        if not self._users:
            self._users = {}

        if email in self._users:
            return self._users[email]

        query = gql("""
            query GetUser($email: String!) {
                users(filter: { email: { eq: $email } }) {
                    nodes {
                        id
                        email
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(query, variable_values={"email": email})

        if result["users"]["nodes"]:
            user_id = result["users"]["nodes"][0]["id"]
            self._users[email] = user_id
            return user_id

        return None

    def _get_state_mapping(self) -> Dict[TicketState, str]:
        """Get mapping from universal states to Linear state types.

        Required by BaseAdapter abstract method.
        """
        return {
            TicketState.OPEN: LinearStateType.BACKLOG,
            TicketState.IN_PROGRESS: LinearStateType.STARTED,
            TicketState.READY: LinearStateType.STARTED,  # Will use label for distinction
            TicketState.TESTED: LinearStateType.STARTED,  # Will use label
            TicketState.DONE: LinearStateType.COMPLETED,
            TicketState.WAITING: LinearStateType.UNSTARTED,
            TicketState.BLOCKED: LinearStateType.UNSTARTED,  # Will use label
            TicketState.CLOSED: LinearStateType.CANCELED,
        }

    def _map_state_to_linear(self, state: TicketState) -> str:
        """Map universal state to Linear state type."""
        # Handle both enum and string values
        if isinstance(state, str):
            state = TicketState(state)
        return self._state_mapping.get(state, LinearStateType.BACKLOG)

    def _map_linear_state(self, state_data: Dict[str, Any], labels: List[str]) -> TicketState:
        """Map Linear state and labels to universal state."""
        state_type = state_data.get("type", "").lower()

        # Check for special states via labels
        labels_lower = [l.lower() for l in labels]
        if "blocked" in labels_lower:
            return TicketState.BLOCKED
        if "waiting" in labels_lower:
            return TicketState.WAITING
        if "ready" in labels_lower or "review" in labels_lower:
            return TicketState.READY
        if "tested" in labels_lower or "qa" in labels_lower:
            return TicketState.TESTED

        # Map by state type
        state_mapping = {
            "backlog": TicketState.OPEN,
            "unstarted": TicketState.OPEN,
            "started": TicketState.IN_PROGRESS,
            "completed": TicketState.DONE,
            "canceled": TicketState.CLOSED,
        }
        return state_mapping.get(state_type, TicketState.OPEN)

    def _task_from_linear_issue(self, issue: Dict[str, Any]) -> Task:
        """Convert Linear issue to universal Task."""
        # Extract labels
        tags = []
        if issue.get("labels") and issue["labels"].get("nodes"):
            tags = [label["name"] for label in issue["labels"]["nodes"]]

        # Map priority
        linear_priority = issue.get("priority", 4)
        priority = LinearPriorityMapping.FROM_LINEAR.get(linear_priority, Priority.MEDIUM)

        # Map state
        state = self._map_linear_state(issue.get("state", {}), tags)

        # Build metadata with all Linear-specific fields
        metadata = {
            "linear": {
                "id": issue["id"],
                "identifier": issue["identifier"],
                "url": issue.get("url"),
                "state_id": issue.get("state", {}).get("id"),
                "state_name": issue.get("state", {}).get("name"),
                "team_id": issue.get("team", {}).get("id"),
                "team_name": issue.get("team", {}).get("name"),
                "cycle_id": issue.get("cycle", {}).get("id") if issue.get("cycle") else None,
                "cycle_name": issue.get("cycle", {}).get("name") if issue.get("cycle") else None,
                "project_id": issue.get("project", {}).get("id") if issue.get("project") else None,
                "project_name": issue.get("project", {}).get("name") if issue.get("project") else None,
                "priority_label": issue.get("priorityLabel"),
                "estimate": issue.get("estimate"),
                "due_date": issue.get("dueDate"),
                "branch_name": issue.get("branchName"),
            }
        }

        # Add timestamps if available
        if issue.get("startedAt"):
            metadata["linear"]["started_at"] = issue["startedAt"]
        if issue.get("completedAt"):
            metadata["linear"]["completed_at"] = issue["completedAt"]
        if issue.get("canceledAt"):
            metadata["linear"]["canceled_at"] = issue["canceledAt"]

        # Add attachments metadata
        if issue.get("attachments") and issue["attachments"].get("nodes"):
            metadata["linear"]["attachments"] = [
                {
                    "id": att["id"],
                    "url": att["url"],
                    "title": att.get("title"),
                    "source": att.get("source"),
                }
                for att in issue["attachments"]["nodes"]
            ]

        # Extract child issue IDs
        child_ids = []
        if issue.get("children") and issue["children"].get("nodes"):
            child_ids = [child["identifier"] for child in issue["children"]["nodes"]]
            metadata["linear"]["child_issues"] = child_ids

        return Task(
            id=issue["identifier"],
            title=issue["title"],
            description=issue.get("description"),
            state=state,
            priority=priority,
            tags=tags,
            parent_issue=issue.get("parent", {}).get("identifier") if issue.get("parent") else None,
            parent_epic=issue.get("project", {}).get("id") if issue.get("project") else None,
            assignee=issue.get("assignee", {}).get("email") if issue.get("assignee") else None,
            estimated_hours=issue.get("estimate"),
            created_at=datetime.fromisoformat(issue["createdAt"].replace("Z", "+00:00"))
            if issue.get("createdAt") else None,
            updated_at=datetime.fromisoformat(issue["updatedAt"].replace("Z", "+00:00"))
            if issue.get("updatedAt") else None,
            metadata=metadata,
        )

    async def create(self, ticket: Task) -> Task:
        """Create a new Linear issue with full field support."""
        team_id = await self._ensure_team_id()
        states = await self._get_workflow_states()

        # Map state to Linear state ID
        linear_state_type = self._map_state_to_linear(ticket.state)
        state_data = states.get(linear_state_type)
        if not state_data:
            # Fallback to backlog state
            state_data = states.get("backlog")
        state_id = state_data["id"] if state_data else None

        # Build issue input
        issue_input = {
            "title": ticket.title,
            "teamId": team_id,
        }

        if ticket.description:
            issue_input["description"] = ticket.description

        if state_id:
            issue_input["stateId"] = state_id

        # Set priority
        if ticket.priority:
            issue_input["priority"] = LinearPriorityMapping.TO_LINEAR.get(ticket.priority, 3)

        # Handle labels/tags
        if ticket.tags:
            label_ids = []
            for tag in ticket.tags:
                # Add special state labels if needed
                if ticket.state == TicketState.BLOCKED and "blocked" not in [t.lower() for t in ticket.tags]:
                    label_ids.append(await self._get_or_create_label("blocked", "#FF0000"))
                elif ticket.state == TicketState.WAITING and "waiting" not in [t.lower() for t in ticket.tags]:
                    label_ids.append(await self._get_or_create_label("waiting", "#FFA500"))
                elif ticket.state == TicketState.READY and "ready" not in [t.lower() for t in ticket.tags]:
                    label_ids.append(await self._get_or_create_label("ready", "#00FF00"))

                label_id = await self._get_or_create_label(tag)
                label_ids.append(label_id)
            if label_ids:
                issue_input["labelIds"] = label_ids

        # Handle assignee
        if ticket.assignee:
            user_id = await self._get_user_id(ticket.assignee)
            if user_id:
                issue_input["assigneeId"] = user_id

        # Handle estimate (Linear uses integer points, so we round hours)
        if ticket.estimated_hours:
            issue_input["estimate"] = int(round(ticket.estimated_hours))

        # Handle parent issue
        if ticket.parent_issue:
            # Get parent issue's Linear ID
            parent_query = gql("""
                query GetIssue($identifier: String!) {
                    issue(id: $identifier) {
                        id
                    }
                }
            """)
            async with self.client as session:
                parent_result = await session.execute(
                    parent_query,
                    variable_values={"identifier": ticket.parent_issue}
                )
                if parent_result.get("issue"):
                    issue_input["parentId"] = parent_result["issue"]["id"]

        # Handle project (epic)
        if ticket.parent_epic:
            issue_input["projectId"] = ticket.parent_epic

        # Handle metadata fields
        if ticket.metadata and "linear" in ticket.metadata:
            linear_meta = ticket.metadata["linear"]
            if "due_date" in linear_meta:
                issue_input["dueDate"] = linear_meta["due_date"]
            if "cycle_id" in linear_meta:
                issue_input["cycleId"] = linear_meta["cycle_id"]

        # Create issue mutation with full fields
        create_query = gql(ALL_FRAGMENTS + """
            mutation CreateIssue($input: IssueCreateInput!) {
                issueCreate(input: $input) {
                    success
                    issue {
                        ...IssueFullFields
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                create_query,
                variable_values={"input": issue_input}
            )

        if not result["issueCreate"]["success"]:
            raise Exception("Failed to create Linear issue")

        created_issue = result["issueCreate"]["issue"]
        return self._task_from_linear_issue(created_issue)

    async def read(self, ticket_id: str) -> Optional[Task]:
        """Read a Linear issue by identifier with full details."""
        query = gql(ALL_FRAGMENTS + """
            query GetIssue($identifier: String!) {
                issue(id: $identifier) {
                    ...IssueFullFields
                }
            }
        """)

        try:
            async with self.client as session:
                result = await session.execute(
                    query,
                    variable_values={"identifier": ticket_id}
                )

            if result.get("issue"):
                return self._task_from_linear_issue(result["issue"])
        except TransportQueryError:
            # Issue not found
            pass

        return None

    async def update(self, ticket_id: str, updates: Dict[str, Any]) -> Optional[Task]:
        """Update a Linear issue with comprehensive field support."""
        # First get the Linear internal ID
        query = gql("""
            query GetIssueId($identifier: String!) {
                issue(id: $identifier) {
                    id
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                query,
                variable_values={"identifier": ticket_id}
            )

        if not result.get("issue"):
            return None

        linear_id = result["issue"]["id"]

        # Build update input
        update_input = {}

        if "title" in updates:
            update_input["title"] = updates["title"]

        if "description" in updates:
            update_input["description"] = updates["description"]

        if "priority" in updates:
            priority = updates["priority"]
            if isinstance(priority, str):
                priority = Priority(priority)
            update_input["priority"] = LinearPriorityMapping.TO_LINEAR.get(priority, 3)

        if "state" in updates:
            states = await self._get_workflow_states()
            state = updates["state"]
            if isinstance(state, str):
                state = TicketState(state)
            linear_state_type = self._map_state_to_linear(state)
            state_data = states.get(linear_state_type)
            if state_data:
                update_input["stateId"] = state_data["id"]

        if "assignee" in updates:
            if updates["assignee"]:
                user_id = await self._get_user_id(updates["assignee"])
                if user_id:
                    update_input["assigneeId"] = user_id
            else:
                update_input["assigneeId"] = None

        if "tags" in updates:
            label_ids = []
            for tag in updates["tags"]:
                label_id = await self._get_or_create_label(tag)
                label_ids.append(label_id)
            update_input["labelIds"] = label_ids

        if "estimated_hours" in updates:
            update_input["estimate"] = int(round(updates["estimated_hours"]))

        # Handle metadata updates
        if "metadata" in updates and "linear" in updates["metadata"]:
            linear_meta = updates["metadata"]["linear"]
            if "due_date" in linear_meta:
                update_input["dueDate"] = linear_meta["due_date"]
            if "cycle_id" in linear_meta:
                update_input["cycleId"] = linear_meta["cycle_id"]
            if "project_id" in linear_meta:
                update_input["projectId"] = linear_meta["project_id"]

        # Update mutation
        update_query = gql(ALL_FRAGMENTS + """
            mutation UpdateIssue($id: String!, $input: IssueUpdateInput!) {
                issueUpdate(id: $id, input: $input) {
                    success
                    issue {
                        ...IssueFullFields
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                update_query,
                variable_values={"id": linear_id, "input": update_input}
            )

        if result["issueUpdate"]["success"]:
            return self._task_from_linear_issue(result["issueUpdate"]["issue"])

        return None

    async def delete(self, ticket_id: str) -> bool:
        """Archive (soft delete) a Linear issue."""
        # Get Linear ID
        query = gql("""
            query GetIssueId($identifier: String!) {
                issue(id: $identifier) {
                    id
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                query,
                variable_values={"identifier": ticket_id}
            )

        if not result.get("issue"):
            return False

        linear_id = result["issue"]["id"]

        # Archive mutation
        archive_query = gql("""
            mutation ArchiveIssue($id: String!) {
                issueArchive(id: $id) {
                    success
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                archive_query,
                variable_values={"id": linear_id}
            )

        return result.get("issueArchive", {}).get("success", False)

    async def list(
        self,
        limit: int = 10,
        offset: int = 0,
        filters: Optional[Dict[str, Any]] = None
    ) -> List[Task]:
        """List Linear issues with comprehensive filtering."""
        team_id = await self._ensure_team_id()

        # Build filter
        issue_filter = {"team": {"id": {"eq": team_id}}}

        if filters:
            # State filter
            if "state" in filters:
                state = filters["state"]
                if isinstance(state, str):
                    state = TicketState(state)
                # Map to Linear state types
                state_mapping = {
                    TicketState.OPEN: ["backlog", "unstarted"],
                    TicketState.IN_PROGRESS: ["started"],
                    TicketState.DONE: ["completed"],
                    TicketState.CLOSED: ["canceled"],
                }
                if state in state_mapping:
                    issue_filter["state"] = {"type": {"in": state_mapping[state]}}

            # Priority filter
            if "priority" in filters:
                priority = filters["priority"]
                if isinstance(priority, str):
                    priority = Priority(priority)
                linear_priority = LinearPriorityMapping.TO_LINEAR.get(priority, 3)
                issue_filter["priority"] = {"eq": linear_priority}

            # Assignee filter
            if "assignee" in filters and filters["assignee"]:
                user_id = await self._get_user_id(filters["assignee"])
                if user_id:
                    issue_filter["assignee"] = {"id": {"eq": user_id}}

            # Project filter
            if "project_id" in filters:
                issue_filter["project"] = {"id": {"eq": filters["project_id"]}}

            # Cycle filter
            if "cycle_id" in filters:
                issue_filter["cycle"] = {"id": {"eq": filters["cycle_id"]}}

            # Label filter
            if "labels" in filters:
                issue_filter["labels"] = {"some": {"name": {"in": filters["labels"]}}}

            # Parent filter
            if "parent_id" in filters:
                issue_filter["parent"] = {"identifier": {"eq": filters["parent_id"]}}

            # Date filters
            if "created_after" in filters:
                issue_filter["createdAt"] = {"gte": filters["created_after"]}
            if "updated_after" in filters:
                issue_filter["updatedAt"] = {"gte": filters["updated_after"]}
            if "due_before" in filters:
                issue_filter["dueDate"] = {"lte": filters["due_before"]}

        # Exclude archived issues by default
        if not filters or "includeArchived" not in filters or not filters["includeArchived"]:
            issue_filter["archivedAt"] = {"null": True}

        query = gql(ISSUE_LIST_FRAGMENTS + """
            query ListIssues($filter: IssueFilter, $first: Int!) {
                issues(
                    filter: $filter
                    first: $first
                    orderBy: updatedAt
                ) {
                    nodes {
                        ...IssueCompactFields
                    }
                    pageInfo {
                        hasNextPage
                        hasPreviousPage
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                query,
                variable_values={
                    "filter": issue_filter,
                    "first": limit,
                    # Note: Linear uses cursor-based pagination, not offset
                    # For simplicity, we ignore offset here
                }
            )

        tasks = []
        for issue in result["issues"]["nodes"]:
            tasks.append(self._task_from_linear_issue(issue))

        return tasks

    async def search(self, query: SearchQuery) -> List[Task]:
        """Search Linear issues with advanced filtering and text search."""
        team_id = await self._ensure_team_id()

        # Build filter
        issue_filter = {"team": {"id": {"eq": team_id}}}

        # Text search in title and description
        if query.query:
            issue_filter["or"] = [
                {"title": {"containsIgnoreCase": query.query}},
                {"description": {"containsIgnoreCase": query.query}},
            ]

        # State filter
        if query.state:
            state_mapping = {
                TicketState.OPEN: ["backlog", "unstarted"],
                TicketState.IN_PROGRESS: ["started"],
                TicketState.DONE: ["completed"],
                TicketState.CLOSED: ["canceled"],
            }
            if query.state in state_mapping:
                issue_filter["state"] = {"type": {"in": state_mapping[query.state]}}

        # Priority filter
        if query.priority:
            linear_priority = LinearPriorityMapping.TO_LINEAR.get(query.priority, 3)
            issue_filter["priority"] = {"eq": linear_priority}

        # Assignee filter
        if query.assignee:
            user_id = await self._get_user_id(query.assignee)
            if user_id:
                issue_filter["assignee"] = {"id": {"eq": user_id}}

        # Tags filter (labels in Linear)
        if query.tags:
            issue_filter["labels"] = {"some": {"name": {"in": query.tags}}}

        # Exclude archived
        issue_filter["archivedAt"] = {"null": True}

        search_query = gql(ISSUE_LIST_FRAGMENTS + """
            query SearchIssues($filter: IssueFilter, $first: Int!) {
                issues(
                    filter: $filter
                    first: $first
                    orderBy: updatedAt
                ) {
                    nodes {
                        ...IssueCompactFields
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                search_query,
                variable_values={
                    "filter": issue_filter,
                    "first": query.limit,
                    # Note: Linear uses cursor-based pagination, not offset
                }
            )

        tasks = []
        for issue in result["issues"]["nodes"]:
            tasks.append(self._task_from_linear_issue(issue))

        return tasks

    async def transition_state(
        self,
        ticket_id: str,
        target_state: TicketState
    ) -> Optional[Task]:
        """Transition Linear issue to new state with workflow validation."""
        # Validate transition
        if not await self.validate_transition(ticket_id, target_state):
            return None

        # Update state
        return await self.update(ticket_id, {"state": target_state})

    async def add_comment(self, comment: Comment) -> Comment:
        """Add comment to a Linear issue."""
        # Get Linear issue ID
        query = gql("""
            query GetIssueId($identifier: String!) {
                issue(id: $identifier) {
                    id
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                query,
                variable_values={"identifier": comment.ticket_id}
            )

        if not result.get("issue"):
            raise ValueError(f"Issue {comment.ticket_id} not found")

        linear_id = result["issue"]["id"]

        # Create comment mutation (only include needed fragments)
        create_comment_query = gql(USER_FRAGMENT + COMMENT_FRAGMENT + """
            mutation CreateComment($input: CommentCreateInput!) {
                commentCreate(input: $input) {
                    success
                    comment {
                        ...CommentFields
                    }
                }
            }
        """)

        comment_input = {
            "issueId": linear_id,
            "body": comment.content,
        }

        # Handle parent comment for threading
        if comment.metadata and "parent_comment_id" in comment.metadata:
            comment_input["parentId"] = comment.metadata["parent_comment_id"]

        async with self.client as session:
            result = await session.execute(
                create_comment_query,
                variable_values={"input": comment_input}
            )

        if not result["commentCreate"]["success"]:
            raise Exception("Failed to create comment")

        created_comment = result["commentCreate"]["comment"]

        return Comment(
            id=created_comment["id"],
            ticket_id=comment.ticket_id,
            author=created_comment["user"]["email"] if created_comment.get("user") else None,
            content=created_comment["body"],
            created_at=datetime.fromisoformat(created_comment["createdAt"].replace("Z", "+00:00")),
            metadata={
                "linear": {
                    "id": created_comment["id"],
                    "parent_id": created_comment.get("parent", {}).get("id") if created_comment.get("parent") else None,
                }
            },
        )

    async def get_comments(
        self,
        ticket_id: str,
        limit: int = 10,
        offset: int = 0
    ) -> List[Comment]:
        """Get comments for a Linear issue with pagination."""
        query = gql(USER_FRAGMENT + COMMENT_FRAGMENT + """
            query GetIssueComments($identifier: String!, $first: Int!) {
                issue(id: $identifier) {
                    comments(first: $first, orderBy: createdAt) {
                        nodes {
                            ...CommentFields
                        }
                    }
                }
            }
        """)

        try:
            async with self.client as session:
                result = await session.execute(
                    query,
                    variable_values={
                        "identifier": ticket_id,
                        "first": limit,
                        # Note: Linear uses cursor-based pagination
                    }
                )

            if not result.get("issue"):
                return []

            comments = []
            for comment_data in result["issue"]["comments"]["nodes"]:
                comments.append(Comment(
                    id=comment_data["id"],
                    ticket_id=ticket_id,
                    author=comment_data["user"]["email"] if comment_data.get("user") else None,
                    content=comment_data["body"],
                    created_at=datetime.fromisoformat(comment_data["createdAt"].replace("Z", "+00:00")),
                    metadata={
                        "linear": {
                            "id": comment_data["id"],
                            "parent_id": comment_data.get("parent", {}).get("id") if comment_data.get("parent") else None,
                        }
                    },
                ))

            return comments
        except TransportQueryError:
            return []

    async def create_project(self, name: str, description: Optional[str] = None) -> str:
        """Create a Linear project."""
        team_id = await self._ensure_team_id()

        create_query = gql("""
            mutation CreateProject($input: ProjectCreateInput!) {
                projectCreate(input: $input) {
                    success
                    project {
                        id
                        name
                    }
                }
            }
        """)

        project_input = {
            "name": name,
            "teamIds": [team_id],
        }
        if description:
            project_input["description"] = description

        async with self.client as session:
            result = await session.execute(
                create_query,
                variable_values={"input": project_input}
            )

        if not result["projectCreate"]["success"]:
            raise Exception("Failed to create project")

        return result["projectCreate"]["project"]["id"]

    async def get_cycles(self, active_only: bool = True) -> List[Dict[str, Any]]:
        """Get Linear cycles (sprints) for the team."""
        team_id = await self._ensure_team_id()

        cycle_filter = {"team": {"id": {"eq": team_id}}}
        if active_only:
            cycle_filter["isActive"] = {"eq": True}

        query = gql("""
            query GetCycles($filter: CycleFilter) {
                cycles(filter: $filter, orderBy: createdAt) {
                    nodes {
                        id
                        number
                        name
                        description
                        startsAt
                        endsAt
                        completedAt
                        issues {
                            nodes {
                                id
                                identifier
                            }
                        }
                    }
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                query,
                variable_values={"filter": cycle_filter}
            )

        return result["cycles"]["nodes"]

    async def add_to_cycle(self, ticket_id: str, cycle_id: str) -> bool:
        """Add an issue to a cycle."""
        return await self.update(
            ticket_id,
            {"metadata": {"linear": {"cycle_id": cycle_id}}}
        ) is not None

    async def set_due_date(self, ticket_id: str, due_date: Union[str, date]) -> bool:
        """Set due date for an issue."""
        if isinstance(due_date, date):
            due_date = due_date.isoformat()

        return await self.update(
            ticket_id,
            {"metadata": {"linear": {"due_date": due_date}}}
        ) is not None

    async def add_reaction(self, comment_id: str, emoji: str) -> bool:
        """Add reaction to a comment."""
        create_query = gql("""
            mutation CreateReaction($input: ReactionCreateInput!) {
                reactionCreate(input: $input) {
                    success
                }
            }
        """)

        async with self.client as session:
            result = await session.execute(
                create_query,
                variable_values={
                    "input": {
                        "commentId": comment_id,
                        "emoji": emoji,
                    }
                }
            )

        return result.get("reactionCreate", {}).get("success", False)

    async def close(self) -> None:
        """Close the GraphQL client connection."""
        if hasattr(self.client, 'close_async'):
            await self.client.close_async()
        elif hasattr(self.client.transport, 'close'):
            await self.client.transport.close()


# Register the adapter
AdapterRegistry.register("linear", LinearAdapter)
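Usage note: the sketch below illustrates how the adapter defined in this diff might be driven end to end, based on the __init__ docstring (api_key or the LINEAR_API_KEY env var, a required team_key) and the create/close methods shown above. It is a minimal sketch, not part of the package: the import paths are inferred from the file listing, and the Task constructor arguments and their defaults are assumptions, since the core.models module is not shown in this diff.

# Illustrative only -- class and method names come from the diff above;
# the Task constructor arguments and defaults are assumptions.
import asyncio

from mcp_ticketer.adapters.linear import LinearAdapter
from mcp_ticketer.core.models import Task, TicketState, Priority


async def main() -> None:
    # api_key may also come from the LINEAR_API_KEY environment variable;
    # team_key is required by LinearAdapter.__init__.
    adapter = LinearAdapter({"api_key": "lin_api_...", "team_key": "BTA"})
    await adapter.initialize()  # preloads team ID, workflow states, and labels

    task = Task(  # assumed: fields not passed here have defaults in the Task model
        title="Investigate flaky sync job",
        description="Seen intermittently after upgrading to 0.1.1.",
        state=TicketState.OPEN,
        priority=Priority.HIGH,
        tags=["bug"],
    )
    created = await adapter.create(task)
    print(created.id, created.state)

    await adapter.close()


asyncio.run(main())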