mcp-ticketer 2.0.1__py3-none-any.whl → 2.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-ticketer might be problematic.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/aitrackdown.py +122 -0
- mcp_ticketer/adapters/asana/adapter.py +121 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/{github.py → github/adapter.py} +1506 -365
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/{jira.py → jira/adapter.py} +250 -678
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/adapter.py +1000 -92
- mcp_ticketer/adapters/linear/client.py +91 -1
- mcp_ticketer/adapters/linear/mappers.py +107 -0
- mcp_ticketer/adapters/linear/queries.py +112 -2
- mcp_ticketer/adapters/linear/types.py +50 -10
- mcp_ticketer/cli/configure.py +524 -89
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/main.py +10 -0
- mcp_ticketer/cli/mcp_configure.py +177 -49
- mcp_ticketer/cli/platform_installer.py +9 -0
- mcp_ticketer/cli/setup_command.py +157 -1
- mcp_ticketer/cli/ticket_commands.py +443 -81
- mcp_ticketer/cli/utils.py +113 -0
- mcp_ticketer/core/__init__.py +28 -0
- mcp_ticketer/core/adapter.py +367 -1
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +345 -0
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/session_state.py +6 -1
- mcp_ticketer/core/state_matcher.py +36 -3
- mcp_ticketer/mcp/server/__main__.py +2 -1
- mcp_ticketer/mcp/server/routing.py +68 -0
- mcp_ticketer/mcp/server/tools/__init__.py +7 -4
- mcp_ticketer/mcp/server/tools/attachment_tools.py +3 -1
- mcp_ticketer/mcp/server/tools/config_tools.py +233 -35
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +30 -1
- mcp_ticketer/mcp/server/tools/ticket_tools.py +37 -1
- mcp_ticketer/queue/queue.py +68 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/METADATA +33 -3
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/RECORD +72 -36
- mcp_ticketer-2.2.13.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer-2.0.1.dist-info/top_level.txt +0 -1
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/licenses/LICENSE +0 -0
@@ -21,11 +21,13 @@ except ImportError:
 
 import builtins
 
+from ...cache.memory import MemoryCache
 from ...core.adapter import BaseAdapter
 from ...core.models import (
     Attachment,
     Comment,
     Epic,
+    Milestone,
     ProjectUpdate,
     ProjectUpdateHealth,
     SearchQuery,
@@ -45,10 +47,14 @@ from .mappers import (
 )
 from .queries import (
     ALL_FRAGMENTS,
+    ARCHIVE_CYCLE_MUTATION,
+    CREATE_CYCLE_MUTATION,
     CREATE_ISSUE_MUTATION,
     CREATE_LABEL_MUTATION,
     CREATE_PROJECT_UPDATE_MUTATION,
     GET_CUSTOM_VIEW_QUERY,
+    GET_CYCLE_ISSUES_QUERY,
+    GET_CYCLE_QUERY,
     GET_ISSUE_STATUS_QUERY,
     GET_PROJECT_UPDATE_QUERY,
     LIST_CYCLES_QUERY,
@@ -57,6 +63,7 @@ from .queries import (
     LIST_PROJECT_UPDATES_QUERY,
     LIST_PROJECTS_QUERY,
     SEARCH_ISSUES_QUERY,
+    UPDATE_CYCLE_MUTATION,
     UPDATE_ISSUE_MUTATION,
     WORKFLOW_STATES_QUERY,
 )
@@ -98,6 +105,7 @@ class LinearAdapter(BaseAdapter[Task]):
         - team_key: Linear team key (e.g., 'BTA') OR
         - team_id: Linear team UUID (e.g., '02d15669-7351-4451-9719-807576c16049')
         - api_url: Optional Linear API URL (defaults to https://api.linear.app/graphql)
+        - labels_ttl: TTL for label cache in seconds (default: 300)
 
         Raises:
         ------
@@ -108,7 +116,8 @@ class LinearAdapter(BaseAdapter[Task]):
         # because parent constructor calls _get_state_mapping()
         self._team_data: dict[str, Any] | None = None
         self._workflow_states: dict[str, dict[str, Any]] | None = None
-        self.
+        self._labels_ttl = config.get("labels_ttl", 300.0)  # 5 min default
+        self._labels_cache = MemoryCache(default_ttl=self._labels_ttl)
         self._users_cache: dict[str, dict[str, Any]] | None = None
         self._initialized = False
 
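The labels_ttl hunk above swaps the adapter's plain label cache for a TTL-bounded in-memory cache. The rest of the diff relies on a small contract only: a default_ttl constructor argument plus async get/set/clear. A minimal illustrative stand-in for that contract (a sketch, not the package's actual MemoryCache):

import time
from typing import Any

class TTLMemoryCache:
    """Illustrative TTL cache matching the get/set/clear calls seen in this diff."""

    def __init__(self, default_ttl: float = 300.0) -> None:
        self._default_ttl = default_ttl
        self._store: dict[str, tuple[float, Any]] = {}

    async def set(self, key: str, value: Any, ttl: float | None = None) -> None:
        expires_at = time.monotonic() + (ttl if ttl is not None else self._default_ttl)
        self._store[key] = (expires_at, value)

    async def get(self, key: str) -> Any | None:
        entry = self._store.get(key)
        if entry is None:
            return None
        expires_at, value = entry
        if time.monotonic() > expires_at:  # Expired: evict lazily on read
            del self._store[key]
            return None
        return value

    async def clear(self) -> None:
        self._store.clear()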
@@ -219,6 +228,13 @@ class LinearAdapter(BaseAdapter[Task]):
         logger.debug("Loading team data and workflow states...")
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before initialization
+        if not team_id:
+            raise ValueError(
+                "Cannot initialize Linear adapter without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         # Load workflow states and labels for the team
         await self._load_workflow_states(team_id)
         await self._load_team_labels(team_id)
@@ -531,6 +547,51 @@ class LinearAdapter(BaseAdapter[Task]):
 
         return epic
 
+    def _validate_linear_uuid(self, uuid_value: str, field_name: str = "UUID") -> bool:
+        """Validate Linear UUID format (36 chars, 8-4-4-4-12 pattern).
+
+        Linear UUIDs follow standard UUID v4 format:
+        - Total length: 36 characters
+        - Pattern: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+        - Contains exactly 4 hyphens at positions 8, 13, 18, 23
+
+        Args:
+        ----
+            uuid_value: UUID string to validate
+            field_name: Name of field for error messages (default: "UUID")
+
+        Returns:
+        -------
+            True if valid UUID format, False otherwise
+
+        Examples:
+        --------
+            >>> _validate_linear_uuid("12345678-1234-1234-1234-123456789012", "projectId")
+            True
+            >>> _validate_linear_uuid("invalid-uuid", "projectId")
+            False
+        """
+        logger = logging.getLogger(__name__)
+
+        if not isinstance(uuid_value, str):
+            logger.warning(f"{field_name} is not a string: {type(uuid_value).__name__}")
+            return False
+
+        if len(uuid_value) != 36:
+            logger.warning(
+                f"{field_name} has invalid length {len(uuid_value)}, expected 36 characters"
+            )
+            return False
+
+        if uuid_value.count("-") != 4:
+            logger.warning(
+                f"{field_name} has invalid format: {uuid_value}. "
+                f"Expected xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx pattern"
+            )
+            return False
+
+        return True
+
     async def _resolve_project_id(self, project_identifier: str) -> str | None:
         """Resolve project identifier (slug, name, short ID, or URL) to full UUID.
 
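The new _validate_linear_uuid accepts any 36-character string with four hyphens, which is cheap but looser than full UUID parsing. If strict segment validation were wanted, a stdlib-based check could stand in; a sketch, not part of this release:

import uuid

def is_canonical_uuid(value: str) -> bool:
    """Strict alternative: parse with the stdlib, then require canonical 8-4-4-4-12 text."""
    try:
        return str(uuid.UUID(value)) == value.lower()
    except (TypeError, AttributeError, ValueError):
        return False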
@@ -678,11 +739,31 @@ class LinearAdapter(BaseAdapter[Task]):
                 or slug_part.lower() == project_lower
                 or short_id.lower() == project_lower
             ):
-
+                project_uuid = project["id"]
+                # Validate UUID format before returning
+                if not self._validate_linear_uuid(
+                    project_uuid, "projectId"
+                ):
+                    logging.getLogger(__name__).error(
+                        f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                        f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                        f"This indicates a data inconsistency in Linear API response."
+                    )
+                    return None
+                return project_uuid
 
             # Also check exact name match (case-insensitive)
             if project["name"].lower() == project_lower:
-
+                project_uuid = project["id"]
+                # Validate UUID format before returning
+                if not self._validate_linear_uuid(project_uuid, "projectId"):
+                    logging.getLogger(__name__).error(
+                        f"Project '{project_identifier}' resolved to invalid UUID format: '{project_uuid}'. "
+                        f"Expected 36-character UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "
+                        f"This indicates a data inconsistency in Linear API response."
+                    )
+                    return None
+                return project_uuid
 
         # No match found
         return None
@@ -882,33 +963,110 @@ class LinearAdapter(BaseAdapter[Task]):
             ) from e
 
     async def _load_workflow_states(self, team_id: str) -> None:
-        """Load and cache workflow states for the team.
+        """Load and cache workflow states for the team with semantic name matching.
+
+        Implements two-level mapping strategy to handle Linear workflows with
+        multiple states of the same type (e.g., "Todo", "Backlog", "Ready" all
+        being "unstarted"):
+
+        1. Semantic name matching: Match state names to universal states using
+           predefined mappings (flexible, respects custom workflows)
+        2. State type fallback: Use first state of matching type for unmapped
+           universal states (backward compatible)
+
+        This fixes issue 1M-552 where transitions to READY/TESTED/WAITING states
+        failed with "Discrepancy between issue state and state type" errors.
 
         Args:
         ----
            team_id: Linear team ID
 
         """
+        logger = logging.getLogger(__name__)
        try:
             result = await self.client.execute_query(
                 WORKFLOW_STATES_QUERY, {"teamId": team_id}
             )
 
-
-
+            states = result["team"]["states"]["nodes"]
+
+            # Build auxiliary mappings for efficient lookup
+            state_by_name: dict[str, tuple[str, str]] = {}  # name → (state_id, type)
+            state_by_type: dict[str, str] = {}  # type → state_id (first occurrence)
+
+            # Sort states by position to ensure consistent selection
+            sorted_states = sorted(states, key=lambda s: s["position"])
+
+            for state in sorted_states:
+                state_id = state["id"]
+                state_name = state["name"].lower()
                 state_type = state["type"].lower()
-
-
-
-
+
+                # Store by name for semantic matching (first occurrence wins)
+                if state_name not in state_by_name:
+                    state_by_name[state_name] = (state_id, state_type)
+
+                # Store by type for fallback (keep first occurrence by position)
+                if state_type not in state_by_type:
+                    state_by_type[state_type] = state_id
+
+            # Build final state map with semantic matching
+            workflow_states = {}
+
+            for universal_state in TicketState:
+                state_id = None
+                matched_strategy = None
+
+                # Strategy 1: Try semantic name matching
+                if universal_state in LinearStateMapping.SEMANTIC_NAMES:
+                    for semantic_name in LinearStateMapping.SEMANTIC_NAMES[
+                        universal_state
+                    ]:
+                        if semantic_name in state_by_name:
+                            state_id = state_by_name[semantic_name][0]
+                            matched_strategy = f"name:{semantic_name}"
+                            break
+
+                # Strategy 2: Fallback to type mapping
+                if not state_id:
+                    linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
+                    if linear_type:
+                        state_id = state_by_type.get(linear_type)
+                        if state_id:
+                            matched_strategy = f"type:{linear_type}"
+
+                if state_id:
+                    workflow_states[universal_state.value] = state_id
+                    logger.debug(
+                        f"Mapped {universal_state.value} → {state_id} "
+                        f"(strategy: {matched_strategy})"
+                    )
 
             self._workflow_states = workflow_states
 
+            # Log warning if multiple states of same type detected
+            type_counts: dict[str, int] = {}
+            for state in states:
+                state_type = state["type"].lower()
+                type_counts[state_type] = type_counts.get(state_type, 0) + 1
+
+            multi_state_types = {
+                type_: count for type_, count in type_counts.items() if count > 1
+            }
+            if multi_state_types:
+                logger.info(
+                    f"Team {team_id} has multiple states per type: {multi_state_types}. "
+                    "Using semantic name matching for state resolution."
+                )
+
         except Exception as e:
             raise ValueError(f"Failed to load workflow states: {e}") from e
 
     async def _load_team_labels(self, team_id: str) -> None:
-        """Load and cache labels for the team with retry logic.
+        """Load and cache labels for the team with retry logic and pagination.
+
+        Fetches ALL labels for the team using cursor-based pagination.
+        Handles teams with >250 labels (Linear's default page size).
 
         Args:
         ----
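The two-strategy resolution above depends on two tables in the adapter's types module, SEMANTIC_NAMES and TO_LINEAR, whose shape is implied by the lookups but whose contents are not shown in this diff. A hypothetical sketch of that shape (state names and values are illustrative only):

from enum import Enum

class TicketState(Enum):  # Stand-in for the package's universal state enum
    OPEN = "open"
    READY = "ready"
    IN_PROGRESS = "in_progress"
    DONE = "done"

class LinearStateMapping:
    # Universal state -> candidate Linear state *names*, tried in order (Strategy 1)
    SEMANTIC_NAMES: dict[TicketState, list[str]] = {
        TicketState.READY: ["ready", "ready for dev", "todo"],
        TicketState.IN_PROGRESS: ["in progress", "started"],
    }
    # Universal state -> Linear state *type* used as fallback (Strategy 2)
    TO_LINEAR: dict[TicketState, str] = {
        TicketState.OPEN: "backlog",
        TicketState.READY: "unstarted",
        TicketState.IN_PROGRESS: "started",
        TicketState.DONE: "completed",
    }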
@@ -918,15 +1076,19 @@ class LinearAdapter(BaseAdapter[Task]):
         logger = logging.getLogger(__name__)
 
         query = """
-        query GetTeamLabels($teamId: String
+        query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
             team(id: $teamId) {
-                labels {
+                labels(first: $first, after: $after) {
                     nodes {
                         id
                         name
                         color
                         description
                     }
+                    pageInfo {
+                        hasNextPage
+                        endCursor
+                    }
                 }
             }
         }
@@ -935,10 +1097,40 @@ class LinearAdapter(BaseAdapter[Task]):
         max_retries = 3
         for attempt in range(max_retries):
             try:
-
-
-
-
+                # Fetch all labels with pagination
+                all_labels: list[dict] = []
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    all_labels.extend(page_labels)
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                    if page_count >= max_pages and has_next_page:
+                        logger.warning(
+                            f"Reached max page limit ({max_pages}) for team {team_id}. "
+                            f"Loaded {len(all_labels)} labels, but more may exist."
+                        )
+
+                # Store in TTL-based cache
+                cache_key = f"linear_labels:{team_id}"
+                await self._labels_cache.set(cache_key, all_labels)
+                logger.info(
+                    f"Loaded {len(all_labels)} labels for team {team_id} ({page_count} page(s))"
+                )
                 return  # Success
 
             except Exception as e:
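The hasNextPage/endCursor loop above reappears almost verbatim in _find_label_by_name later in this diff. Factored out, the pattern is a generic Relay-connection iterator; a self-contained sketch (execute_query and the connection path are assumptions standing in for the adapter's client):

from typing import Any, AsyncIterator, Awaitable, Callable

async def iter_connection(
    execute_query: Callable[[str, dict[str, Any]], Awaitable[dict[str, Any]]],
    query: str,
    variables: dict[str, Any],
    path: tuple[str, ...] = ("team", "labels"),
    page_size: int = 250,
    max_pages: int = 10,
) -> AsyncIterator[dict[str, Any]]:
    """Yield nodes from a Relay-style connection, one page at a time."""
    after: str | None = None
    for _ in range(max_pages):  # Safety bound, mirroring max_pages in the diff
        page_vars = {**variables, "first": page_size}
        if after:
            page_vars["after"] = after
        result = await execute_query(query, page_vars)
        conn: dict[str, Any] = result
        for key in path:  # Walk down to the connection object
            conn = conn.get(key, {})
        for node in conn.get("nodes", []):
            yield node
        page_info = conn.get("pageInfo", {})
        if not page_info.get("hasNextPage"):
            return
        after = page_info.get("endCursor")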
@@ -954,12 +1146,14 @@ class LinearAdapter(BaseAdapter[Task]):
                        f"Failed to load team labels after {max_retries} attempts: {e}",
                        exc_info=True,
                    )
-
+                    # Store empty list in cache on failure
+                    cache_key = f"linear_labels:{team_id}"
+                    await self._labels_cache.set(cache_key, [])
 
     async def _find_label_by_name(
         self, name: str, team_id: str, max_retries: int = 3
     ) -> dict | None:
-        """Find a label by name using Linear API (server-side check) with retry logic.
+        """Find a label by name using Linear API (server-side check) with retry logic and pagination.
 
         Handles cache staleness by checking Linear's server-side state.
         This method is used when cache lookup misses to prevent duplicate
@@ -969,6 +1163,10 @@ class LinearAdapter(BaseAdapter[Task]):
        network failures and distinguish between "label not found" (None) and
        "check failed" (exception).
 
+        Uses cursor-based pagination with early exit optimization to handle
+        teams with >250 labels efficiently. Stops searching as soon as the
+        label is found.
+
         Args:
         ----
             name: Label name to search for (case-insensitive)
@@ -986,11 +1184,6 @@ class LinearAdapter(BaseAdapter[Task]):
                 (network/API failure). Caller must handle to prevent
                 duplicate label creation.
 
-        Note:
-        ----
-            This method queries Linear's API and returns the first 250 labels.
-            For teams with >250 labels, pagination would be needed (future enhancement).
-
         Related:
         -------
             1M-443: Fix duplicate label error when setting existing labels
@@ -1000,15 +1193,19 @@ class LinearAdapter(BaseAdapter[Task]):
         logger = logging.getLogger(__name__)
 
         query = """
-        query GetTeamLabels($teamId: String
+        query GetTeamLabels($teamId: String!, $first: Int!, $after: String) {
             team(id: $teamId) {
-                labels(first:
+                labels(first: $first, after: $after) {
                     nodes {
                         id
                         name
                         color
                         description
                     }
+                    pageInfo {
+                        hasNextPage
+                        endCursor
+                    }
                 }
             }
         }
@@ -1016,20 +1213,47 @@ class LinearAdapter(BaseAdapter[Task]):
 
         for attempt in range(max_retries):
             try:
-
-                labels = result.get("team", {}).get("labels", {}).get("nodes", [])
-
-                # Case-insensitive search
+                # Search with pagination and early exit
                 name_lower = name.lower()
-
-
-
-
-
-
+                has_next_page = True
+                after_cursor = None
+                page_count = 0
+                max_pages = 10  # Safety limit: 10 pages * 250 labels = 2500 labels max
+                total_checked = 0
+
+                while has_next_page and page_count < max_pages:
+                    page_count += 1
+                    variables = {"teamId": team_id, "first": 250}
+                    if after_cursor:
+                        variables["after"] = after_cursor
+
+                    result = await self.client.execute_query(query, variables)
+                    labels_data = result.get("team", {}).get("labels", {})
+                    page_labels = labels_data.get("nodes", [])
+                    page_info = labels_data.get("pageInfo", {})
+
+                    total_checked += len(page_labels)
+
+                    # Case-insensitive search in current page
+                    for label in page_labels:
+                        if label["name"].lower() == name_lower:
+                            logger.debug(
+                                f"Found label '{name}' via server-side search "
+                                f"(ID: {label['id']}, checked {total_checked} labels)"
+                            )
+                            return label
+
+                    has_next_page = page_info.get("hasNextPage", False)
+                    after_cursor = page_info.get("endCursor")
+
+                    if page_count >= max_pages and has_next_page:
+                        logger.warning(
+                            f"Reached max page limit ({max_pages}) searching for label '{name}'. "
+                            f"Checked {total_checked} labels, but more exist."
+                        )
 
                 # Label definitively doesn't exist (successful check)
-                logger.debug(f"Label '{name}' not found in {
+                logger.debug(f"Label '{name}' not found in {total_checked} team labels")
                 return None
 
             except Exception as e:
@@ -1096,9 +1320,9 @@ class LinearAdapter(BaseAdapter[Task]):
             created_label = result["issueLabelCreate"]["issueLabel"]
             label_id = created_label["id"]
 
-            #
+            # Invalidate cache to force refresh on next access
             if self._labels_cache is not None:
-                self._labels_cache.
+                await self._labels_cache.clear()
 
             logger.info(f"Created new label '{name}' with ID: {label_id}")
             return label_id
@@ -1122,26 +1346,76 @@ class LinearAdapter(BaseAdapter[Task]):
                     f"Duplicate label detected for '{name}', attempting recovery lookup"
                 )
 
-                # Retry Tier 2:
-
-
-
-                label_id = server_label["id"]
+                # Retry Tier 2 with backoff: API eventual consistency requires delay
+                # Linear API has 100-500ms propagation delay between write and read
+                max_recovery_attempts = 5
+                backoff_delays = [0.1, 0.2, 0.5, 1.0, 1.5]  # Total: 3.3s max
 
-
-
-
+                for attempt in range(max_recovery_attempts):
+                    try:
+                        if attempt > 0:
+                            # Wait before retry (skip delay on first attempt)
+                            delay = backoff_delays[
+                                min(attempt - 1, len(backoff_delays) - 1)
+                            ]
+                            logger.debug(
+                                f"Label '{name}' duplicate detected. "
+                                f"Retrying retrieval (attempt {attempt + 1}/{max_recovery_attempts}) "
+                                f"after {delay}s delay for API propagation..."
+                            )
+                            await asyncio.sleep(delay)
+
+                        # Query server for existing label
+                        server_label = await self._find_label_by_name(name, team_id)
+
+                        if server_label:
+                            label_id = server_label["id"]
+
+                            # Invalidate cache to force refresh on next access
+                            if self._labels_cache is not None:
+                                await self._labels_cache.clear()
+
+                            logger.info(
+                                f"Successfully recovered existing label '{name}' (ID: {label_id}) "
+                                f"after {attempt + 1} attempt(s)"
+                            )
+                            return label_id
+
+                        # Label still not found, log and continue to next retry
+                        logger.debug(
+                            f"Label '{name}' not found in recovery attempt {attempt + 1}/{max_recovery_attempts}"
+                        )
 
-
-
-
-
-                return label_id
+                    except Exception as lookup_error:
+                        logger.warning(
+                            f"Recovery lookup failed on attempt {attempt + 1}/{max_recovery_attempts}: {lookup_error}"
+                        )
 
-
+                        # If this is the last attempt, raise with context
+                        if attempt == max_recovery_attempts - 1:
+                            raise ValueError(
+                                f"Failed to recover label '{name}' after {max_recovery_attempts} attempts. "
+                                f"Last error: {lookup_error}. This may indicate:\n"
+                                f"  1. Network connectivity issues\n"
+                                f"  2. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                                f"  3. Label exists beyond first 250 labels in team\n"
+                                f"  4. Permissions issue preventing label query\n"
+                                f"Please retry the operation or check Linear workspace status."
+                            ) from lookup_error
+
+                        # Not the last attempt, continue to next retry
+                        continue
+
+                # If we get here, all recovery attempts failed (label never found, no exceptions)
                 raise ValueError(
-                    f"Label '{name}' already exists but could not retrieve ID
-                    f"
+                    f"Label '{name}' already exists but could not retrieve ID after "
+                    f"{max_recovery_attempts} attempts. The label query succeeded but returned no results.\n"
+                    f"This may indicate:\n"
+                    f"  1. API propagation delay >{sum(backoff_delays):.1f}s (very unusual)\n"
+                    f"  2. Label exists beyond first 250 labels in team\n"
+                    f"  3. Permissions issue preventing label query\n"
+                    f"  4. Team ID mismatch\n"
+                    f"Please retry the operation or check Linear workspace permissions."
                 ) from e
 
                 # Not a duplicate error - re-raise original exception
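The recovery path above is a bounded retry with a fixed delay table (about 3.3 s total) rather than open-ended exponential backoff. The same behavior as a reusable helper, sketched around a hypothetical fetch callable:

import asyncio
from typing import Awaitable, Callable, TypeVar

T = TypeVar("T")

async def retry_with_backoff(
    fetch: Callable[[], Awaitable[T | None]],
    delays: tuple[float, ...] = (0.1, 0.2, 0.5, 1.0, 1.5),
) -> T | None:
    """Call fetch until it returns a non-None value, sleeping between attempts."""
    result = await fetch()  # First attempt runs without delay
    if result is not None:
        return result
    for delay in delays:
        await asyncio.sleep(delay)
        result = await fetch()
        if result is not None:
            return result
    return None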
@@ -1194,24 +1468,33 @@ class LinearAdapter(BaseAdapter[Task]):
         if not label_names:
             return []
 
-        #
-
-
+        # Get team ID for label operations
+        team_id = await self._ensure_team_id()
+
+        # Validate team_id before loading labels
+        if not team_id:
+            raise ValueError(
+                "Cannot resolve Linear labels without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Check cache for labels
+        cache_key = f"linear_labels:{team_id}"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        # Load labels if not cached
+        if cached_labels is None:
             await self._load_team_labels(team_id)
+            cached_labels = await self._labels_cache.get(cache_key)
 
-        if
+        if not cached_labels:
             logger.error(
-                "Label cache is
+                "Label cache is empty after load attempt. Tags will be skipped."
             )
             return []
 
-        # Get team ID for creating new labels
-        team_id = await self._ensure_team_id()
-
         # Create name -> ID mapping (case-insensitive)
-        label_map = {
-            label["name"].lower(): label["id"] for label in (self._labels_cache or [])
-        }
+        label_map = {label["name"].lower(): label["id"] for label in cached_labels}
 
         logger.debug(f"Available labels in team: {list(label_map.keys())}")
 
@@ -1245,18 +1528,18 @@ class LinearAdapter(BaseAdapter[Task]):
                     ) from e
 
                 if server_label:
-                    # Label exists on server but not in cache -
+                    # Label exists on server but not in cache - invalidate cache
                     label_id = server_label["id"]
                     label_ids.append(label_id)
                     label_map[name_lower] = label_id
 
-                    #
+                    # Invalidate cache to force refresh on next access
                     if self._labels_cache is not None:
-                        self._labels_cache.
+                        await self._labels_cache.clear()
 
                     logger.info(
                         f"[Tier 2] Found stale label '{name}' on server (ID: {label_id}), "
-                        "
+                        "invalidated cache for refresh"
                     )
                 else:
                     # Tier 3: Label truly doesn't exist - create it
@@ -1292,7 +1575,7 @@ class LinearAdapter(BaseAdapter[Task]):
 
         Returns:
         -------
-            Dictionary mapping TicketState to Linear state ID
+            Dictionary mapping TicketState to Linear state ID (UUID)
 
         """
         if not self._workflow_states:
@@ -1309,13 +1592,18 @@ class LinearAdapter(BaseAdapter[Task]):
             }
 
         # Return ID-based mapping using cached workflow states
+        # _workflow_states is keyed by universal_state.value (e.g., "open")
+        # and contains state UUIDs directly
         mapping = {}
-        for universal_state
-
-
+        for universal_state in TicketState:
+            state_uuid = self._workflow_states.get(universal_state.value)
+            if state_uuid:
+                mapping[universal_state] = state_uuid
             else:
-                # Fallback to type name
-
+                # Fallback to type name if state not found in cache
+                linear_type = LinearStateMapping.TO_LINEAR.get(universal_state)
+                if linear_type:
+                    mapping[universal_state] = linear_type
 
         return mapping
 
@@ -1419,6 +1707,13 @@ class LinearAdapter(BaseAdapter[Task]):
         logger = logging.getLogger(__name__)
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before creating issue
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear issue without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         # Build issue input using mapper
         issue_input = build_linear_issue_input(task, team_id)
 
@@ -1529,6 +1824,22 @@ class LinearAdapter(BaseAdapter[Task]):
             )
             issue_input.pop("labelIds")
 
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logger.debug(
+            "Creating Linear issue with input: %s",
+            {
+                "title": task.title,
+                "teamId": team_id,
+                "projectId": issue_input.get("projectId"),
+                "parentId": issue_input.get("parentId"),
+                "stateId": issue_input.get("stateId"),
+                "priority": issue_input.get("priority"),
+                "labelIds": issue_input.get("labelIds"),
+                "assigneeId": issue_input.get("assigneeId"),
+                "hasDescription": bool(task.description),
+            },
+        )
+
         try:
             result = await self.client.execute_mutation(
                 CREATE_ISSUE_MUTATION, {"input": issue_input}
@@ -1559,13 +1870,45 @@ class LinearAdapter(BaseAdapter[Task]):
         """
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before creating teamIds array
+        if not team_id:
+            raise ValueError(
+                "Cannot create Linear project without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         project_input = {
             "name": epic.title,
             "teamIds": [team_id],
         }
 
         if epic.description:
-
+            # Validate description length (Linear limit: 255 chars for project description)
+            # Matches validation in update_epic() for consistency
+            from mcp_ticketer.core.validators import FieldValidator, ValidationError
+
+            try:
+                validated_description = FieldValidator.validate_field(
+                    "linear", "epic_description", epic.description, truncate=False
+                )
+                project_input["description"] = validated_description
+            except ValidationError as e:
+                raise ValueError(
+                    f"Epic description validation failed: {e}. "
+                    f"Linear projects have a 255 character limit for descriptions. "
+                    f"Current length: {len(epic.description)} characters."
+                ) from e
+
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Creating Linear project with input: %s",
+            {
+                "name": epic.title,
+                "teamIds": [team_id],
+                "hasDescription": bool(project_input.get("description")),
+                "leadId": project_input.get("leadId"),
+            },
+        )
 
         # Create project mutation
         create_query = """
@@ -1677,6 +2020,20 @@ class LinearAdapter(BaseAdapter[Task]):
         if "icon" in updates:
             update_input["icon"] = updates["icon"]
 
+        # Debug logging: Log mutation input before execution for troubleshooting
+        logging.getLogger(__name__).debug(
+            "Updating Linear project %s with input: %s",
+            epic_id,
+            {
+                "name": update_input.get("name"),
+                "hasDescription": bool(update_input.get("description")),
+                "state": update_input.get("state"),
+                "targetDate": update_input.get("targetDate"),
+                "color": update_input.get("color"),
+                "icon": update_input.get("icon"),
+            },
+        )
+
         # ProjectUpdate mutation
         update_query = """
             mutation UpdateProject($id: String!, $input: ProjectUpdateInput!) {
@@ -1972,19 +2329,35 @@ class LinearAdapter(BaseAdapter[Task]):
             return False
 
     async def list(
-        self,
-
-
+        self,
+        limit: int = 20,
+        offset: int = 0,
+        filters: dict[str, Any] | None = None,
+        compact: bool = False,
+    ) -> dict[str, Any] | builtins.list[Task]:
+        """List Linear issues with optional filtering and compact output.
 
         Args:
         ----
-            limit: Maximum number of issues to return
+            limit: Maximum number of issues to return (default: 20, max: 100)
             offset: Number of issues to skip (Note: Linear uses cursor-based pagination)
             filters: Optional filters (state, assignee, priority, etc.)
+            compact: Return compact format for token efficiency (default: False for backward compatibility)
 
         Returns:
         -------
-
+            When compact=True: Dictionary with items and pagination metadata
+            When compact=False: List of Task objects (backward compatible, default)
+
+        Design Decision: Backward Compatible Default (1M-554)
+        ------------------------------------------------------
+        Rationale: Backward compatibility prioritized to avoid breaking existing code.
+        Compact mode available via explicit compact=True for new code.
+
+        Default compact=False maintains existing return type (list[Task]).
+        Users can opt-in to compact mode for 77% token reduction.
+
+        Recommended: Use compact=True for new code to reduce token usage by ~77%.
 
         """
         # Validate credentials
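A usage sketch of the opt-in compact mode (the import path and the api_key config key are assumptions; team_key follows the class docstring earlier in this diff):

import asyncio

from mcp_ticketer.adapters.linear.adapter import LinearAdapter

async def main() -> None:
    adapter = LinearAdapter({"api_key": "lin_api_...", "team_key": "BTA"})

    tasks = await adapter.list(limit=20)               # default: list[Task]
    page = await adapter.list(limit=20, compact=True)  # opt-in: compact dict
    print(page["pagination"])  # total_returned / limit / offset / has_more
    if page["pagination"]["has_more"]:
        await adapter.list(limit=20, offset=20, compact=True)

asyncio.run(main())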
@@ -1995,6 +2368,17 @@ class LinearAdapter(BaseAdapter[Task]):
         await self.initialize()
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before filtering
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Enforce maximum limit to prevent excessive responses
+        if limit > 100:
+            limit = 100
+
         # Build issue filter
         issue_filter = build_issue_filter(
             team_id=team_id,
@@ -2034,6 +2418,24 @@ class LinearAdapter(BaseAdapter[Task]):
             for issue in result["issues"]["nodes"]:
                 tasks.append(map_linear_issue_to_task(issue))
 
+            # Return compact format with pagination metadata
+            if compact:
+                from .mappers import task_to_compact_format
+
+                compact_items = [task_to_compact_format(task) for task in tasks]
+                return {
+                    "status": "success",
+                    "items": compact_items,
+                    "pagination": {
+                        "total_returned": len(compact_items),
+                        "limit": limit,
+                        "offset": offset,
+                        "has_more": len(tasks)
+                        == limit,  # Heuristic: full page likely means more
+                    },
+                }
+
+            # Backward compatible: return list of Task objects
             return tasks
 
         except Exception as e:
@@ -2059,6 +2461,13 @@ class LinearAdapter(BaseAdapter[Task]):
         await self.initialize()
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before searching
+        if not team_id:
+            raise ValueError(
+                "Cannot search Linear issues without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         # Build comprehensive issue filter
         issue_filter = {"team": {"id": {"eq": team_id}}}
 
@@ -2302,13 +2711,26 @@ class LinearAdapter(BaseAdapter[Task]):
             List of label dictionaries with 'id', 'name', and 'color' fields
 
         """
-        #
-
-
+        # Get team ID for label operations
+        team_id = await self._ensure_team_id()
+        # Validate team_id before loading labels
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear labels without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Check cache for labels
+        cache_key = f"linear_labels:{team_id}"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        # Load labels if not cached
+        if cached_labels is None:
             await self._load_team_labels(team_id)
+            cached_labels = await self._labels_cache.get(cache_key)
 
         # Return cached labels or empty list if not available
-        if not
+        if not cached_labels:
             return []
 
         # Transform to standardized format
@@ -2318,9 +2740,19 @@ class LinearAdapter(BaseAdapter[Task]):
                 "name": label["name"],
                 "color": label.get("color", ""),
             }
-            for label in
+            for label in cached_labels
         ]
 
+    async def invalidate_label_cache(self) -> None:
+        """Manually invalidate the label cache.
+
+        Useful when labels are modified externally or after creating new labels.
+        The cache will be automatically refreshed on the next label operation.
+
+        """
+        if self._labels_cache is not None:
+            await self._labels_cache.clear()
+
     async def upload_file(self, file_path: str, mime_type: str | None = None) -> str:
         """Upload a file to Linear's storage and return the asset URL.
 
@@ -2762,6 +3194,13 @@ class LinearAdapter(BaseAdapter[Task]):
         if team_id is None:
             team_id = await self._ensure_team_id()
 
+        # Validate team_id before listing cycles
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear cycles without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         try:
             # Fetch all cycles with pagination
             all_cycles: list[dict[str, Any]] = []
@@ -2876,6 +3315,13 @@ class LinearAdapter(BaseAdapter[Task]):
         if team_id is None:
             team_id = await self._ensure_team_id()
 
+        # Validate team_id before listing statuses
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear issue statuses without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
         try:
             result = await self.client.execute_query(
                 LIST_ISSUE_STATUSES_QUERY, {"teamId": team_id}
@@ -2894,30 +3340,40 @@ class LinearAdapter(BaseAdapter[Task]):
 
     async def list_epics(
         self,
-        limit: int =
+        limit: int = 20,
         offset: int = 0,
         state: str | None = None,
         include_completed: bool = True,
+        compact: bool = False,
         **kwargs: Any,
-    ) -> builtins.list[Epic]:
-        """List Linear projects (epics) with efficient pagination.
+    ) -> dict[str, Any] | builtins.list[Epic]:
+        """List Linear projects (epics) with efficient pagination and compact output.
 
         Args:
         ----
-            limit: Maximum number of projects to return (default:
+            limit: Maximum number of projects to return (default: 20, max: 100)
            offset: Number of projects to skip (note: Linear uses cursor-based pagination)
             state: Filter by project state (e.g., "planned", "started", "completed", "canceled")
             include_completed: Whether to include completed projects (default: True)
+            compact: Return compact format for token efficiency (default: False for backward compatibility)
             **kwargs: Additional filter parameters (reserved for future use)
 
         Returns:
         -------
-
+            When compact=True: Dictionary with items and pagination metadata
+            When compact=False: List of Epic objects (backward compatible, default)
 
         Raises:
         ------
            ValueError: If credentials are invalid or query fails
 
+        Design Decision: Backward Compatible with Opt-in Compact Mode (1M-554)
+        ----------------------------------------------------------------------
+        Rationale: Reduced default limit from 50 to 20 to match list() behavior.
+        Compact mode provides ~77% token reduction when explicitly enabled.
+
+        Recommended: Use compact=True for new code to reduce token usage.
+
         """
         # Validate credentials
         is_valid, error_message = self.validate_credentials()
@@ -2927,6 +3383,17 @@ class LinearAdapter(BaseAdapter[Task]):
         await self.initialize()
         team_id = await self._ensure_team_id()
 
+        # Validate team_id before listing projects
+        if not team_id:
+            raise ValueError(
+                "Cannot list Linear projects without team_id. "
+                "Ensure LINEAR_TEAM_KEY is configured correctly."
+            )
+
+        # Enforce maximum limit to prevent excessive responses
+        if limit > 100:
+            limit = 100
+
         # Build project filter using existing helper
         from .types import build_project_filter
 
@@ -2976,6 +3443,23 @@ class LinearAdapter(BaseAdapter[Task]):
             for project in paginated_projects:
                 epics.append(map_linear_project_to_epic(project))
 
+            # Return compact format with pagination metadata
+            if compact:
+                from .mappers import epic_to_compact_format
+
+                compact_items = [epic_to_compact_format(epic) for epic in epics]
+                return {
+                    "status": "success",
+                    "items": compact_items,
+                    "pagination": {
+                        "total_returned": len(compact_items),
+                        "limit": limit,
+                        "offset": offset,
+                        "has_more": has_next_page,  # Use actual Linear pagination status
+                    },
+                }
+
+            # Backward compatible: return list of Epic objects
             return epics
 
         except Exception as e:
@@ -3244,6 +3728,430 @@ class LinearAdapter(BaseAdapter[Task]):
             logger.error(f"Failed to get project update {update_id}: {e}")
             raise ValueError(f"Failed to get project update '{update_id}': {e}") from e
 
+    # Milestone Operations (1M-607 Phase 2: Linear Adapter Integration)
+
+    async def milestone_create(
+        self,
+        name: str,
+        target_date: datetime | None = None,
+        labels: list[str] | None = None,
+        description: str = "",
+        project_id: str | None = None,
+    ) -> Milestone:
+        """Create milestone using Linear Cycles.
+
+        Linear Cycles require start and end dates. If target_date is provided,
+        set startsAt to today and endsAt to target_date. If no target_date,
+        defaults to a 2-week cycle.
+
+        Args:
+        ----
+            name: Milestone name
+            target_date: Target completion date (optional)
+            labels: Labels for milestone grouping (optional, stored in metadata)
+            description: Milestone description
+            project_id: Associated project ID (optional)
+
+        Returns:
+        -------
+            Created Milestone object
+
+        Raises:
+        ------
+            ValueError: If credentials invalid or creation fails
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        # Linear requires both start and end dates for cycles
+        from datetime import timedelta, timezone
+
+        starts_at = datetime.now(timezone.utc)
+        if target_date:
+            ends_at = target_date
+            # Ensure ends_at has timezone info
+            if ends_at.tzinfo is None:
+                ends_at = ends_at.replace(tzinfo=timezone.utc)
+        else:
+            # Default to 2 weeks from now
+            ends_at = starts_at + timedelta(days=14)
+
+        try:
+            result = await self.client.execute_query(
+                CREATE_CYCLE_MUTATION,
+                {
+                    "input": {
+                        "name": name,
+                        "description": description,
+                        "startsAt": starts_at.isoformat(),
+                        "endsAt": ends_at.isoformat(),
+                        "teamId": team_id,
+                    }
+                },
+            )
+
+            if not result.get("cycleCreate", {}).get("success"):
+                raise ValueError("Failed to create cycle")
+
+            cycle_data = result["cycleCreate"]["cycle"]
+            logger.info(
+                f"Created Linear cycle {cycle_data['id']} for milestone '{name}'"
+            )
+
+            # Convert Linear Cycle to Milestone model
+            return self._cycle_to_milestone(cycle_data, labels)
+
+        except Exception as e:
+            logger.error(f"Failed to create milestone '{name}': {e}")
+            raise ValueError(f"Failed to create milestone: {e}") from e
+
+    async def milestone_get(self, milestone_id: str) -> Milestone | None:
+        """Get milestone by ID with progress calculation.
+
+        Args:
+        ----
+            milestone_id: Milestone/Cycle identifier
+
+        Returns:
+        -------
+            Milestone object with calculated progress, None if not found
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        try:
+            result = await self.client.execute_query(
+                GET_CYCLE_QUERY, {"id": milestone_id}
+            )
+
+            cycle_data = result.get("cycle")
+            if not cycle_data:
+                logger.debug(f"Cycle {milestone_id} not found")
+                return None
+
+            return self._cycle_to_milestone(cycle_data)
+
+        except Exception as e:
+            logger.warning(f"Failed to get milestone {milestone_id}: {e}")
+            return None
+
+    async def milestone_list(
+        self,
+        project_id: str | None = None,
+        state: str | None = None,
+    ) -> list[Milestone]:
+        """List milestones using Linear Cycles.
+
+        Args:
+        ----
+            project_id: Filter by project (not used by Linear Cycles)
+            state: Filter by state (open, active, completed, closed)
+
+        Returns:
+        -------
+            List of Milestone objects
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+        team_id = await self._ensure_team_id()
+
+        try:
+            result = await self.client.execute_query(
+                LIST_CYCLES_QUERY,
+                {"teamId": team_id, "first": 50, "after": None},
+            )
+
+            cycles = result.get("team", {}).get("cycles", {}).get("nodes", [])
+            milestones = [self._cycle_to_milestone(cycle) for cycle in cycles]
+
+            # Apply state filter if provided
+            if state:
+                milestones = [m for m in milestones if m.state == state]
+
+            logger.debug(f"Listed {len(milestones)} milestones (state={state})")
+            return milestones
+
+        except Exception as e:
+            logger.error(f"Failed to list milestones: {e}")
+            return []
+
+    async def milestone_update(
+        self,
+        milestone_id: str,
+        name: str | None = None,
+        target_date: datetime | None = None,
+        state: str | None = None,
+        labels: list[str] | None = None,
+        description: str | None = None,
+    ) -> Milestone | None:
+        """Update milestone properties.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            name: New name (optional)
+            target_date: New target date (optional)
+            state: New state (optional)
+            labels: New labels (optional, stored in metadata)
+            description: New description (optional)
+
+        Returns:
+        -------
+            Updated Milestone object, None if not found
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        # Build update input
+        update_input = {}
+        if name:
+            update_input["name"] = name
+        if description is not None:
+            update_input["description"] = description
+        if target_date:
+            from datetime import timezone
+
+            # Ensure target_date has timezone
+            if target_date.tzinfo is None:
+                target_date = target_date.replace(tzinfo=timezone.utc)
+            update_input["endsAt"] = target_date.isoformat()
+        if state == "completed":
+            # Mark cycle as completed
+            from datetime import datetime, timezone
+
+            update_input["completedAt"] = datetime.now(timezone.utc).isoformat()
+
+        if not update_input:
+            # No updates provided, just return current milestone
+            return await self.milestone_get(milestone_id)
+
+        try:
+            result = await self.client.execute_query(
+                UPDATE_CYCLE_MUTATION,
+                {"id": milestone_id, "input": update_input},
+            )
+
+            if not result.get("cycleUpdate", {}).get("success"):
+                logger.warning(f"Failed to update cycle {milestone_id}")
+                return None
+
+            cycle_data = result["cycleUpdate"]["cycle"]
+            logger.info(f"Updated Linear cycle {milestone_id}")
+
+            return self._cycle_to_milestone(cycle_data, labels)
+
+        except Exception as e:
+            logger.error(f"Failed to update milestone {milestone_id}: {e}")
+            return None
+
+    async def milestone_delete(self, milestone_id: str) -> bool:
+        """Delete (archive) milestone.
+
+        Linear doesn't support permanent cycle deletion, so this archives the cycle.
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+
+        Returns:
+        -------
+            True if deleted successfully, False otherwise
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        try:
+            result = await self.client.execute_query(
+                ARCHIVE_CYCLE_MUTATION, {"id": milestone_id}
+            )
+
+            success = result.get("cycleArchive", {}).get("success", False)
+            if success:
+                logger.info(f"Archived Linear cycle {milestone_id}")
+            else:
+                logger.warning(f"Failed to archive cycle {milestone_id}")
+
+            return success
+
+        except Exception as e:
+            logger.error(f"Failed to delete milestone {milestone_id}: {e}")
+            return False
+
+    async def milestone_get_issues(
+        self,
+        milestone_id: str,
+        state: str | None = None,
+    ) -> list[Task]:
+        """Get issues associated with milestone (cycle).
+
+        Args:
+        ----
+            milestone_id: Milestone identifier
+            state: Filter by issue state (optional)
+
+        Returns:
+        -------
+            List of Task objects in the milestone
+
+        """
+        logger = logging.getLogger(__name__)
+
+        # Validate credentials
+        is_valid, error_message = self.validate_credentials()
+        if not is_valid:
+            raise ValueError(error_message)
+
+        await self.initialize()
+
+        try:
+            result = await self.client.execute_query(
+                GET_CYCLE_ISSUES_QUERY, {"cycleId": milestone_id, "first": 100}
+            )
+
+            cycle_data = result.get("cycle")
+            if not cycle_data:
+                logger.warning(f"Cycle {milestone_id} not found")
+                return []
+
+            issues = cycle_data.get("issues", {}).get("nodes", [])
+
+            # Convert Linear issues to Task objects
+            tasks = [map_linear_issue_to_task(issue) for issue in issues]
+
+            # Filter by state if provided
+            if state:
+                state_filter = TicketState(state) if state else None
+                tasks = [t for t in tasks if t.state == state_filter]
+
+            logger.debug(f"Retrieved {len(tasks)} issues from milestone {milestone_id}")
+            return tasks
+
+        except Exception as e:
+            logger.error(f"Failed to get milestone issues {milestone_id}: {e}")
+            return []
+
+    def _cycle_to_milestone(
+        self,
+        cycle_data: dict[str, Any],
+        labels: list[str] | None = None,
+    ) -> Milestone:
+        """Convert Linear Cycle to universal Milestone model.
+
+        Determines state based on dates:
+        - completed: Has completedAt timestamp
+        - closed: Past end date without completion
+        - active: Current date between start and end
+        - open: Before start date
+
+        Args:
+        ----
+            cycle_data: Linear Cycle data from GraphQL
+            labels: Optional labels to associate with milestone
+
+        Returns:
+        -------
+            Milestone object
+
+        """
+        from datetime import datetime, timezone
+
+        # Determine state from dates
+        now = datetime.now(timezone.utc)
+
+        # Parse dates
+        starts_at_str = cycle_data.get("startsAt")
+        ends_at_str = cycle_data.get("endsAt")
+        completed_at_str = cycle_data.get("completedAt")
+
+        starts_at = (
+            datetime.fromisoformat(starts_at_str.replace("Z", "+00:00"))
+            if starts_at_str
+            else None
+        )
+        ends_at = (
+            datetime.fromisoformat(ends_at_str.replace("Z", "+00:00"))
+            if ends_at_str
+            else None
+        )
+        completed_at = (
+            datetime.fromisoformat(completed_at_str.replace("Z", "+00:00"))
+            if completed_at_str
+            else None
+        )
+
+        # Determine state
+        if completed_at:
+            state = "completed"
+        elif ends_at and now > ends_at:
+            state = "closed"  # Past due without completion
+        elif starts_at and ends_at and starts_at <= now <= ends_at:
+            state = "active"
+        else:
+            state = "open"  # Before start date
+
+        # Parse progress (Linear uses 0.0-1.0, we use 0-100)
+        progress = cycle_data.get("progress", 0.0)
+        progress_pct = progress * 100.0
+
+        return Milestone(
+            id=cycle_data["id"],
+            name=cycle_data["name"],
+            description=cycle_data.get("description", ""),
+            target_date=ends_at,
+            state=state,
+            labels=labels or [],
+            total_issues=cycle_data.get("issueCount", 0),
+            closed_issues=cycle_data.get("completedIssueCount", 0),
+            progress_pct=progress_pct,
+            created_at=None,  # Linear doesn't provide creation timestamp for cycles
+            updated_at=None,
+            platform_data={
+                "linear": {
+                    "cycle_id": cycle_data["id"],
+                    "starts_at": starts_at_str,
+                    "ends_at": ends_at_str,
+                    "completed_at": completed_at_str,
+                    "team": cycle_data.get("team"),
+                }
+            },
+        )
+
     async def close(self) -> None:
         """Close the adapter and clean up resources."""
         await self.client.close()
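The date rules in _cycle_to_milestone reduce to a small pure function; a standalone sketch with a quick self-check:

from datetime import datetime, timedelta, timezone

def cycle_state(
    starts_at: datetime | None,
    ends_at: datetime | None,
    completed_at: datetime | None,
    now: datetime | None = None,
) -> str:
    """State rules mirroring _cycle_to_milestone in the hunk above."""
    now = now or datetime.now(timezone.utc)
    if completed_at:
        return "completed"
    if ends_at and now > ends_at:
        return "closed"  # Past due without completion
    if starts_at and ends_at and starts_at <= now <= ends_at:
        return "active"
    return "open"  # Before the start date (or dates missing)

now = datetime.now(timezone.utc)
assert cycle_state(now - timedelta(days=1), now + timedelta(days=1), None) == "active"
assert cycle_state(now - timedelta(days=3), now - timedelta(days=1), None) == "closed"
assert cycle_state(None, None, now) == "completed"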