mcp-ticketer 0.1.37__py3-none-any.whl → 0.1.39__py3-none-any.whl
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/adapters/github.py +27 -11
- mcp_ticketer/adapters/jira.py +185 -53
- mcp_ticketer/adapters/linear.py +78 -9
- mcp_ticketer/cli/linear_commands.py +490 -0
- mcp_ticketer/cli/main.py +102 -9
- mcp_ticketer/cli/simple_health.py +6 -6
- mcp_ticketer/cli/utils.py +6 -2
- mcp_ticketer/core/env_loader.py +325 -0
- mcp_ticketer/core/models.py +163 -10
- mcp_ticketer/mcp/server.py +4 -4
- mcp_ticketer/queue/manager.py +57 -2
- mcp_ticketer/queue/run_worker.py +5 -0
- mcp_ticketer/queue/worker.py +24 -2
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/METADATA +1 -1
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/RECORD +20 -18
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.37.dist-info → mcp_ticketer-0.1.39.dist-info}/top_level.txt +0 -0
mcp_ticketer/__version__.py
CHANGED
mcp_ticketer/adapters/github.py
CHANGED
@@ -4,13 +4,14 @@ import builtins
 import os
 import re
 from datetime import datetime
-from typing import Any, Optional
+from typing import Any, Dict, List, Optional
 
 import httpx
 
 from ..core.adapter import BaseAdapter
 from ..core.models import Comment, Epic, Priority, SearchQuery, Task, TicketState
 from ..core.registry import AdapterRegistry
+from ..core.env_loader import load_adapter_config, validate_adapter_config
 
 
 class GitHubStateMapping:
@@ -151,21 +152,22 @@ class GitHubAdapter(BaseAdapter[Task]):
         """
         super().__init__(config)
 
+        # Load configuration with environment variable resolution
+        full_config = load_adapter_config("github", config)
+
+        # Validate required configuration
+        missing_keys = validate_adapter_config("github", full_config)
+        if missing_keys:
+            raise ValueError(f"GitHub adapter missing required configuration: {', '.join(missing_keys)}")
+
         # Get authentication token - support both 'api_key' and 'token' for compatibility
         self.token = (
-
+            full_config.get("api_key") or full_config.get("token") or full_config.get("token")
         )
-        if not self.token:
-            raise ValueError(
-                "GitHub token required (config.api_key, config.token or GITHUB_TOKEN env var)"
-            )
 
         # Get repository information
-        self.owner =
-        self.repo =
-
-        if not self.owner or not self.repo:
-            raise ValueError("GitHub owner and repo are required")
+        self.owner = full_config.get("owner")
+        self.repo = full_config.get("repo")
 
         # API URLs
         self.api_url = config.get("api_url", "https://api.github.com")
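Note: the new `mcp_ticketer/core/env_loader.py` module (+325 lines) is not included in this diff, only its use above. As a rough sketch under stated assumptions (the key-to-environment-variable mapping and the required-key list below are guesses, not the released code), `load_adapter_config` and `validate_adapter_config` plausibly behave like this:

# Hypothetical sketch only; the real mcp_ticketer.core.env_loader is not part of this diff.
import os
from typing import Any

# Assumed key-to-env-var mapping and required keys for the "github" adapter.
ENV_VARS = {"github": {"token": "GITHUB_TOKEN", "owner": "GITHUB_OWNER", "repo": "GITHUB_REPO"}}
REQUIRED = {"github": ["token", "owner", "repo"]}


def load_adapter_config(adapter: str, config: dict[str, Any]) -> dict[str, Any]:
    """Resolve settings from environment variables, letting explicit config win."""
    resolved: dict[str, Any] = {
        key: os.environ.get(env_var)
        for key, env_var in ENV_VARS.get(adapter, {}).items()
        if os.environ.get(env_var) is not None
    }
    resolved.update({k: v for k, v in config.items() if v is not None})
    return resolved


def validate_adapter_config(adapter: str, config: dict[str, Any]) -> list[str]:
    """Return the names of required keys that are still unset."""
    return [key for key in REQUIRED.get(adapter, []) if not config.get(key)]

Centralizing resolution this way is what lets the adapter drop the inline GITHUB_TOKEN error message and the explicit owner/repo check seen in the removed lines.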
@@ -1329,6 +1331,20 @@ Fixes #{issue_number}
             "message": f"Successfully linked PR #{pr_number} to issue #{issue_number}",
         }
 
+    async def get_collaborators(self) -> List[Dict[str, Any]]:
+        """Get repository collaborators."""
+        response = await self.client.get(
+            f"/repos/{self.owner}/{self.repo}/collaborators"
+        )
+        response.raise_for_status()
+        return response.json()
+
+    async def get_current_user(self) -> Optional[Dict[str, Any]]:
+        """Get current authenticated user information."""
+        response = await self.client.get("/user")
+        response.raise_for_status()
+        return response.json()
+
     async def close(self) -> None:
         """Close the HTTP client connection."""
         await self.client.aclose()
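The two new read-only helpers reuse the adapter's existing `httpx` client (the relative paths and the `aclose()` call in `close()` imply a client created with `base_url=self.api_url`). A minimal usage sketch, assuming `adapter` is an already configured `GitHubAdapter`:

async def show_repo_access(adapter) -> None:
    me = await adapter.get_current_user()              # GET /user
    collaborators = await adapter.get_collaborators()  # GET /repos/{owner}/{repo}/collaborators
    print(me.get("login") if me else "unauthenticated")
    print(sorted(c.get("login", "") for c in collaborators))
    await adapter.close()

# run with: asyncio.run(show_repo_access(adapter))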
mcp_ticketer/adapters/jira.py
CHANGED
@@ -4,9 +4,10 @@ import asyncio
 import builtins
 import logging
 import os
+import re
 from datetime import datetime
 from enum import Enum
-from typing import Any, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 import httpx
 from httpx import AsyncClient, HTTPStatusError, TimeoutException
@@ -14,10 +15,79 @@ from httpx import AsyncClient, HTTPStatusError, TimeoutException
 from ..core.adapter import BaseAdapter
 from ..core.models import Comment, Epic, Priority, SearchQuery, Task, TicketState
 from ..core.registry import AdapterRegistry
+from ..core.env_loader import load_adapter_config, validate_adapter_config
 
 logger = logging.getLogger(__name__)
 
 
+def parse_jira_datetime(date_str: str) -> Optional[datetime]:
+    """
+    Parse JIRA datetime strings which can be in various formats.
+
+    JIRA can return dates in formats like:
+    - 2025-10-24T14:12:18.771-0400
+    - 2025-10-24T14:12:18.771Z
+    - 2025-10-24T14:12:18.771+00:00
+    """
+    if not date_str:
+        return None
+
+    try:
+        # Handle Z timezone
+        if date_str.endswith('Z'):
+            return datetime.fromisoformat(date_str.replace('Z', '+00:00'))
+
+        # Handle timezone formats like -0400, +0500 (need to add colon)
+        if re.match(r'.*[+-]\d{4}$', date_str):
+            # Insert colon in timezone: -0400 -> -04:00
+            date_str = re.sub(r'([+-]\d{2})(\d{2})$', r'\1:\2', date_str)
+
+        return datetime.fromisoformat(date_str)
+
+    except (ValueError, TypeError) as e:
+        logger.warning(f"Failed to parse JIRA datetime '{date_str}': {e}")
+        return None
+
+
+def extract_text_from_adf(adf_content: Union[str, Dict[str, Any]]) -> str:
+    """
+    Extract plain text from Atlassian Document Format (ADF).
+
+    Args:
+        adf_content: Either a string (already plain text) or ADF document dict
+
+    Returns:
+        Plain text string extracted from the ADF content
+    """
+    if isinstance(adf_content, str):
+        return adf_content
+
+    if not isinstance(adf_content, dict):
+        return str(adf_content) if adf_content else ""
+
+    def extract_text_recursive(node: Dict[str, Any]) -> str:
+        """Recursively extract text from ADF nodes."""
+        if not isinstance(node, dict):
+            return ""
+
+        # If this is a text node, return its text
+        if node.get("type") == "text":
+            return node.get("text", "")
+
+        # If this node has content, process it recursively
+        content = node.get("content", [])
+        if isinstance(content, list):
+            return "".join(extract_text_recursive(child) for child in content)
+
+        return ""
+
+    try:
+        return extract_text_recursive(adf_content)
+    except Exception as e:
+        logger.warning(f"Failed to extract text from ADF: {e}")
+        return str(adf_content) if adf_content else ""
+
+
 class JiraIssueType(str, Enum):
     """Common JIRA issue types."""
 
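The new `parse_jira_datetime` helper replaces the scattered `datetime.fromisoformat(... .replace("Z", "+00:00"))` expressions further down in this file. A few illustrative inputs (the values are arbitrary):

from mcp_ticketer.adapters.jira import parse_jira_datetime

# Offsets without a colon are normalized before datetime.fromisoformat() is called.
parse_jira_datetime("2025-10-24T14:12:18.771-0400")  # datetime with tzinfo UTC-04:00
parse_jira_datetime("2025-10-24T14:12:18.771Z")      # datetime with tzinfo UTC
parse_jira_datetime("")                              # None
parse_jira_datetime("not a date")                    # None, with a logged warning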
@@ -60,21 +130,23 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         """
         super().__init__(config)
 
-        #
-
-
-
-
-
-
-        self.is_cloud = config.get("cloud", True)
-        self.verify_ssl = config.get("verify_ssl", True)
-        self.timeout = config.get("timeout", 30)
-        self.max_retries = config.get("max_retries", 3)
+        # Load configuration with environment variable resolution
+        full_config = load_adapter_config("jira", config)
+
+        # Validate required configuration
+        missing_keys = validate_adapter_config("jira", full_config)
+        if missing_keys:
+            raise ValueError(f"JIRA adapter missing required configuration: {', '.join(missing_keys)}")
 
-        #
-
-
+        # Configuration
+        self.server = full_config.get("server", "")
+        self.email = full_config.get("email", "")
+        self.api_token = full_config.get("api_token", "")
+        self.project_key = full_config.get("project_key", "")
+        self.is_cloud = full_config.get("cloud", True)
+        self.verify_ssl = full_config.get("verify_ssl", True)
+        self.timeout = full_config.get("timeout", 30)
+        self.max_retries = full_config.get("max_retries", 3)
 
         # Clean up server URL
         self.server = self.server.rstrip("/")
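For reference, these are the keys the constructor now reads from `full_config`; whether each can also be supplied via an environment variable is decided by `env_loader`, which this diff does not show. Placeholder values only:

jira_config = {
    "server": "https://example.atlassian.net",
    "email": "bot@example.com",
    "api_token": "…",        # secret; left elided here
    "project_key": "PROJ",
    "cloud": True,           # default True
    "verify_ssl": True,      # default True
    "timeout": 30,           # default 30
    "max_retries": 3,        # default 3
}
# adapter = JiraAdapter(jira_config)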
@@ -382,16 +454,8 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
                 label.get("name", "") if isinstance(label, dict) else str(label)
                 for label in fields.get("labels", [])
             ],
-            "created_at": (
-
-                if fields.get("created")
-                else None
-            ),
-            "updated_at": (
-                datetime.fromisoformat(fields.get("updated", "").replace("Z", "+00:00"))
-                if fields.get("updated")
-                else None
-            ),
+            "created_at": parse_jira_datetime(fields.get("created")),
+            "updated_at": parse_jira_datetime(fields.get("updated")),
             "metadata": {
                 "jira": {
                     "id": issue.get("id"),
@@ -457,9 +521,12 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
             "summary": ticket.title,
             "description": description,
             "labels": ticket.tags,
-            "priority": {"name": self._map_priority_to_jira(ticket.priority)},
         }
 
+        # Only add priority for Tasks, not Epics (some JIRA configurations don't allow priority on Epics)
+        if isinstance(ticket, Task):
+            fields["priority"] = {"name": self._map_priority_to_jira(ticket.priority)}
+
         # Add project if creating new issue
         if not ticket.id and self.project_key:
             fields["project"] = {"key": self.project_key}
@@ -608,16 +675,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
 
-        # Search issues using the
+        # Search issues using the JIRA API endpoint
         data = await self._make_request(
-            "
-            "search/jql", #
-
+            "GET",
+            "search/jql",  # JIRA search endpoint (new API v3)
+            params={
                 "jql": jql,
                 "startAt": offset,
                 "maxResults": limit,
-                "fields":
-                "expand":
+                "fields": "*all",
+                "expand": "renderedFields",
             },
         )
 
@@ -658,16 +725,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         jql = " AND ".join(jql_parts) if jql_parts else "ORDER BY created DESC"
 
-        # Execute search using the
+        # Execute search using the JIRA API endpoint
         data = await self._make_request(
-            "
-            "search/jql", #
-
+            "GET",
+            "search/jql",  # JIRA search endpoint (new API v3)
+            params={
                 "jql": jql,
                 "startAt": query.offset,
                 "maxResults": query.limit,
-                "fields":
-                "expand":
+                "fields": "*all",
+                "expand": "renderedFields",
             },
         )
 
@@ -728,8 +795,24 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
     async def add_comment(self, comment: Comment) -> Comment:
         """Add a comment to a JIRA issue."""
-        # Prepare comment data
-        data = {
+        # Prepare comment data in Atlassian Document Format
+        data = {
+            "body": {
+                "type": "doc",
+                "version": 1,
+                "content": [
+                    {
+                        "type": "paragraph",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": comment.content
+                            }
+                        ]
+                    }
+                ]
+            }
+        }
 
         # Add comment
         result = await self._make_request(
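For a sample comment body (the text is made up), the `data` dict above is a single-paragraph ADF document, and the new `extract_text_from_adf` helper recovers the original text from the same shape:

from mcp_ticketer.adapters.jira import extract_text_from_adf

body = {
    "type": "doc",
    "version": 1,
    "content": [
        {"type": "paragraph", "content": [{"type": "text", "text": "Deployed the fix"}]}
    ],
}
assert extract_text_from_adf(body) == "Deployed the fix"
assert extract_text_from_adf("already plain text") == "already plain text"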
@@ -738,11 +821,7 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         # Update comment with JIRA data
         comment.id = result.get("id")
-        comment.created_at = (
-            datetime.fromisoformat(result.get("created", "").replace("Z", "+00:00"))
-            if result.get("created")
-            else datetime.now()
-        )
+        comment.created_at = parse_jira_datetime(result.get("created")) or datetime.now()
         comment.author = result.get("author", {}).get("displayName", comment.author)
         comment.metadata["jira"] = result
 
@@ -766,18 +845,16 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
         # Convert to Comment objects
         comments = []
         for comment_data in paginated:
+            # Extract text content from ADF format
+            body_content = comment_data.get("body", "")
+            text_content = extract_text_from_adf(body_content)
+
             comment = Comment(
                 id=comment_data.get("id"),
                 ticket_id=ticket_id,
                 author=comment_data.get("author", {}).get("displayName", "Unknown"),
-                content=
-                created_at=(
-                    datetime.fromisoformat(
-                        comment_data.get("created", "").replace("Z", "+00:00")
-                    )
-                    if comment_data.get("created")
-                    else None
-                ),
+                content=text_content,
+                created_at=parse_jira_datetime(comment_data.get("created")),
                 metadata={"jira": comment_data},
             )
             comments.append(comment)
@@ -866,6 +943,61 @@ class JiraAdapter(BaseAdapter[Union[Epic, Task]]):
 
         return sprints_data.get("values", [])
 
+    async def get_project_users(self) -> List[Dict[str, Any]]:
+        """Get users who have access to the project."""
+        if not self.project_key:
+            return []
+
+        try:
+            # Get project role users
+            project_data = await self._make_request("GET", f"project/{self.project_key}")
+
+            # Get users from project roles
+            users = []
+            if "roles" in project_data:
+                for role_name, role_url in project_data["roles"].items():
+                    # Extract role ID from URL
+                    role_id = role_url.split("/")[-1]
+                    try:
+                        role_data = await self._make_request("GET", f"project/{self.project_key}/role/{role_id}")
+                        if "actors" in role_data:
+                            for actor in role_data["actors"]:
+                                if actor.get("type") == "atlassian-user-role-actor":
+                                    users.append(actor.get("actorUser", {}))
+                    except Exception:
+                        # Skip if role access fails
+                        continue
+
+            # Remove duplicates based on accountId
+            seen_ids = set()
+            unique_users = []
+            for user in users:
+                account_id = user.get("accountId")
+                if account_id and account_id not in seen_ids:
+                    seen_ids.add(account_id)
+                    unique_users.append(user)
+
+            return unique_users
+
+        except Exception:
+            # Fallback: try to get assignable users for the project
+            try:
+                users_data = await self._make_request(
+                    "GET",
+                    "user/assignable/search",
+                    params={"project": self.project_key, "maxResults": 50}
+                )
+                return users_data if isinstance(users_data, list) else []
+            except Exception:
+                return []
+
+    async def get_current_user(self) -> Optional[Dict[str, Any]]:
+        """Get current authenticated user information."""
+        try:
+            return await self._make_request("GET", "myself")
+        except Exception:
+            return None
+
     async def close(self) -> None:
         """Close the adapter and cleanup resources."""
         # Clear caches
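A short usage sketch for the two new JIRA user helpers, assuming `adapter` is a configured `JiraAdapter` (the fallback to `user/assignable/search` and the silent `None`/`[]` returns are visible above):

async def pick_default_assignee(adapter):
    me = await adapter.get_current_user()      # "myself" endpoint; None on any error
    users = await adapter.get_project_users()  # deduplicated by accountId; [] on failure
    if me and any(u.get("accountId") == me.get("accountId") for u in users):
        return me["accountId"]
    return None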
mcp_ticketer/adapters/linear.py
CHANGED
@@ -5,7 +5,7 @@ import builtins
 import os
 from datetime import date, datetime
 from enum import Enum
-from typing import Any, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 from gql import Client, gql
 from gql.transport.exceptions import TransportQueryError
@@ -22,6 +22,7 @@ from ..core.models import (
     TicketType,
 )
 from ..core.registry import AdapterRegistry
+from ..core.env_loader import load_adapter_config, validate_adapter_config
 
 
 class LinearStateType(str, Enum):
@@ -295,24 +296,36 @@ class LinearAdapter(BaseAdapter[Task]):
         """
         super().__init__(config)
 
+        # Load configuration with environment variable resolution
+        full_config = load_adapter_config("linear", config)
+
         # Get API key from config or environment
-        self.api_key =
+        self.api_key = full_config.get("api_key")
         if not self.api_key:
             raise ValueError(
                 "Linear API key required (config.api_key or LINEAR_API_KEY env var)"
             )
 
-        self.workspace =
+        self.workspace = full_config.get("workspace")  # Optional, for documentation
 
         # Support both team_key (short key) and team_id (UUID)
-        self.team_key =
-        self.team_id_config =
+        self.team_key = full_config.get("team_key")  # Short key like "BTA"
+        self.team_id_config = full_config.get("team_id")  # UUID like "02d15669-..."
 
         # Require at least one team identifier
         if not self.team_key and not self.team_id_config:
             raise ValueError("Either team_key or team_id is required in configuration")
 
-        self.api_url =
+        self.api_url = full_config.get("api_url", "https://api.linear.app/graphql")
+
+        # DEBUG: Log API key details for debugging
+        import logging
+        logger = logging.getLogger(__name__)
+        logger.info(f"LinearAdapter initialized with API key: {self.api_key[:20]}...")
+        logger.info(f"LinearAdapter config api_key: {config.get('api_key', 'Not set')[:20] if config.get('api_key') else 'Not set'}...")
+        logger.info(f"LinearAdapter env LINEAR_API_KEY: {os.getenv('LINEAR_API_KEY', 'Not set')[:20] if os.getenv('LINEAR_API_KEY') else 'Not set'}...")
+        logger.info(f"LinearAdapter team_id_config: {self.team_id_config}")
+        logger.info(f"LinearAdapter team_key: {self.team_key}")
 
         # Caches for frequently used data
         self._team_id: Optional[str] = None
@@ -839,13 +852,22 @@ class LinearAdapter(BaseAdapter[Task]):
             metadata=metadata,
         )
 
-    async def create(self, ticket: Task) -> Task:
-        """Create a new Linear issue with full field support."""
+    async def create(self, ticket: Union[Epic, Task]) -> Union[Epic, Task]:
+        """Create a new Linear issue or project with full field support."""
         # Validate credentials before attempting operation
         is_valid, error_message = self.validate_credentials()
         if not is_valid:
            raise ValueError(error_message)
 
+        # Handle Epic creation (Linear Projects)
+        if isinstance(ticket, Epic):
+            return await self.create_epic(
+                title=ticket.title,
+                description=ticket.description,
+                tags=ticket.tags,
+                priority=ticket.priority
+            )
+
         team_id = await self._ensure_team_id()
         states = await self._get_workflow_states()
 
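With this change a caller can pass either model to `create()`; Epics are routed to `create_epic()` and become Linear Projects. A sketch, assuming the `Epic`/`Task` models accept these fields as keyword arguments (their constructors are not shown in this diff):

from mcp_ticketer.core.models import Epic, Task

epic = Epic(title="Q4 migration", description="Track the rollout", tags=["infra"])
task = Task(title="Rotate API keys", description="Move to the new vault flow", tags=["security"])

# created_epic = await adapter.create(epic)  # dispatched to create_epic() -> Linear Project
# created_task = await adapter.create(task)  # regular Linear issue path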
@@ -1942,7 +1964,8 @@ class LinearAdapter(BaseAdapter[Task]):
         team_id = await self._ensure_team_id()
 
         create_query = gql(
-
+            TEAM_FRAGMENT
+            + PROJECT_FRAGMENT
             + """
             mutation CreateProject($input: ProjectCreateInput!) {
                 projectCreate(input: $input) {
@@ -2306,6 +2329,52 @@ class LinearAdapter(BaseAdapter[Task]):
         except TransportQueryError:
             return []
 
+    async def get_team_members(self) -> List[Dict[str, Any]]:
+        """Get team members for the current team."""
+        team_id = await self._ensure_team_id()
+
+        query = gql(
+            USER_FRAGMENT + """
+            query GetTeamMembers($teamId: String!) {
+                team(id: $teamId) {
+                    members {
+                        nodes {
+                            ...UserFields
+                        }
+                    }
+                }
+            }
+            """
+        )
+
+        client = self._create_client()
+        async with client as session:
+            result = await session.execute(
+                query, variable_values={"teamId": team_id}
+            )
+
+        if result.get("team", {}).get("members", {}).get("nodes"):
+            return result["team"]["members"]["nodes"]
+        return []
+
+    async def get_current_user(self) -> Optional[Dict[str, Any]]:
+        """Get current user information."""
+        query = gql(
+            USER_FRAGMENT + """
+            query GetCurrentUser {
+                viewer {
+                    ...UserFields
+                }
+            }
+            """
+        )
+
+        client = self._create_client()
+        async with client as session:
+            result = await session.execute(query)
+
+        return result.get("viewer")
+
     async def close(self) -> None:
         """Close the GraphQL client connection.
 
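Both new Linear queries splice in a `USER_FRAGMENT` defined elsewhere in the module (not part of this diff), so the shape of the returned user dicts depends on that fragment. Minimal usage sketch, assuming `adapter` is a configured `LinearAdapter`:

async def whoami_and_team(adapter):
    viewer = await adapter.get_current_user()   # GraphQL "viewer" query; None if absent
    members = await adapter.get_team_members()  # nodes of team(id: ...).members; [] otherwise
    return viewer, members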