mcp-ticketer 0.12.0__py3-none-any.whl → 2.2.13__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Potentially problematic release: this version of mcp-ticketer has been flagged as possibly problematic by the registry.
- mcp_ticketer/__init__.py +10 -10
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/aitrackdown.py +507 -6
- mcp_ticketer/adapters/asana/adapter.py +229 -0
- mcp_ticketer/adapters/asana/mappers.py +14 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/github/adapter.py +3229 -0
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/hybrid.py +47 -5
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/jira/adapter.py +1351 -0
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/adapter.py +2730 -139
- mcp_ticketer/adapters/linear/client.py +175 -3
- mcp_ticketer/adapters/linear/mappers.py +203 -8
- mcp_ticketer/adapters/linear/queries.py +280 -3
- mcp_ticketer/adapters/linear/types.py +120 -4
- mcp_ticketer/analysis/__init__.py +56 -0
- mcp_ticketer/analysis/dependency_graph.py +255 -0
- mcp_ticketer/analysis/health_assessment.py +304 -0
- mcp_ticketer/analysis/orphaned.py +218 -0
- mcp_ticketer/analysis/project_status.py +594 -0
- mcp_ticketer/analysis/similarity.py +224 -0
- mcp_ticketer/analysis/staleness.py +266 -0
- mcp_ticketer/automation/__init__.py +11 -0
- mcp_ticketer/automation/project_updates.py +378 -0
- mcp_ticketer/cli/adapter_diagnostics.py +3 -1
- mcp_ticketer/cli/auggie_configure.py +17 -5
- mcp_ticketer/cli/codex_configure.py +97 -61
- mcp_ticketer/cli/configure.py +1288 -105
- mcp_ticketer/cli/cursor_configure.py +314 -0
- mcp_ticketer/cli/diagnostics.py +13 -12
- mcp_ticketer/cli/discover.py +5 -0
- mcp_ticketer/cli/gemini_configure.py +17 -5
- mcp_ticketer/cli/init_command.py +880 -0
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/instruction_commands.py +6 -0
- mcp_ticketer/cli/main.py +267 -3175
- mcp_ticketer/cli/mcp_configure.py +821 -119
- mcp_ticketer/cli/mcp_server_commands.py +415 -0
- mcp_ticketer/cli/platform_detection.py +77 -12
- mcp_ticketer/cli/platform_installer.py +545 -0
- mcp_ticketer/cli/project_update_commands.py +350 -0
- mcp_ticketer/cli/setup_command.py +795 -0
- mcp_ticketer/cli/simple_health.py +12 -10
- mcp_ticketer/cli/ticket_commands.py +705 -103
- mcp_ticketer/cli/utils.py +113 -0
- mcp_ticketer/core/__init__.py +56 -6
- mcp_ticketer/core/adapter.py +533 -2
- mcp_ticketer/core/config.py +21 -21
- mcp_ticketer/core/exceptions.py +7 -1
- mcp_ticketer/core/label_manager.py +732 -0
- mcp_ticketer/core/mappers.py +31 -19
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +480 -0
- mcp_ticketer/core/onepassword_secrets.py +1 -1
- mcp_ticketer/core/priority_matcher.py +463 -0
- mcp_ticketer/core/project_config.py +132 -14
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/session_state.py +176 -0
- mcp_ticketer/core/state_matcher.py +625 -0
- mcp_ticketer/core/url_parser.py +425 -0
- mcp_ticketer/core/validators.py +69 -0
- mcp_ticketer/mcp/server/__main__.py +2 -1
- mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
- mcp_ticketer/mcp/server/main.py +106 -25
- mcp_ticketer/mcp/server/routing.py +723 -0
- mcp_ticketer/mcp/server/server_sdk.py +58 -0
- mcp_ticketer/mcp/server/tools/__init__.py +33 -11
- mcp_ticketer/mcp/server/tools/analysis_tools.py +854 -0
- mcp_ticketer/mcp/server/tools/attachment_tools.py +5 -5
- mcp_ticketer/mcp/server/tools/bulk_tools.py +259 -202
- mcp_ticketer/mcp/server/tools/comment_tools.py +74 -12
- mcp_ticketer/mcp/server/tools/config_tools.py +1391 -145
- mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
- mcp_ticketer/mcp/server/tools/hierarchy_tools.py +870 -460
- mcp_ticketer/mcp/server/tools/instruction_tools.py +7 -5
- mcp_ticketer/mcp/server/tools/label_tools.py +942 -0
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/pr_tools.py +3 -7
- mcp_ticketer/mcp/server/tools/project_status_tools.py +158 -0
- mcp_ticketer/mcp/server/tools/project_update_tools.py +473 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +209 -97
- mcp_ticketer/mcp/server/tools/session_tools.py +308 -0
- mcp_ticketer/mcp/server/tools/ticket_tools.py +1107 -124
- mcp_ticketer/mcp/server/tools/user_ticket_tools.py +218 -236
- mcp_ticketer/queue/queue.py +68 -0
- mcp_ticketer/queue/worker.py +1 -1
- mcp_ticketer/utils/__init__.py +5 -0
- mcp_ticketer/utils/token_utils.py +246 -0
- mcp_ticketer-2.2.13.dist-info/METADATA +1396 -0
- mcp_ticketer-2.2.13.dist-info/RECORD +158 -0
- mcp_ticketer-2.2.13.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer/adapters/github.py +0 -1574
- mcp_ticketer/adapters/jira.py +0 -1258
- mcp_ticketer-0.12.0.dist-info/METADATA +0 -550
- mcp_ticketer-0.12.0.dist-info/RECORD +0 -91
- mcp_ticketer-0.12.0.dist-info/top_level.txt +0 -1
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.12.0.dist-info → mcp_ticketer-2.2.13.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,3229 @@
|
|
|
1
|
+
"""GitHub adapter implementation using REST API v3 and GraphQL API v4."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import builtins
|
|
6
|
+
import logging
|
|
7
|
+
import re
|
|
8
|
+
from datetime import date, datetime
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
|
|
14
|
+
from ...cache.memory import MemoryCache
|
|
15
|
+
from ...core.adapter import BaseAdapter
|
|
16
|
+
from ...core.env_loader import load_adapter_config, validate_adapter_config
|
|
17
|
+
from ...core.models import (
|
|
18
|
+
Comment,
|
|
19
|
+
Epic,
|
|
20
|
+
Milestone,
|
|
21
|
+
Priority,
|
|
22
|
+
Project,
|
|
23
|
+
ProjectScope,
|
|
24
|
+
ProjectState,
|
|
25
|
+
ProjectStatistics,
|
|
26
|
+
SearchQuery,
|
|
27
|
+
Task,
|
|
28
|
+
TicketState,
|
|
29
|
+
)
|
|
30
|
+
from ...core.registry import AdapterRegistry
|
|
31
|
+
from .client import GitHubClient
|
|
32
|
+
from .mappers import (
|
|
33
|
+
map_github_issue_to_task,
|
|
34
|
+
map_github_milestone_to_epic,
|
|
35
|
+
map_github_milestone_to_milestone,
|
|
36
|
+
map_github_projectv2_to_project,
|
|
37
|
+
)
|
|
38
|
+
from .queries import (
|
|
39
|
+
CREATE_PROJECT_MUTATION,
|
|
40
|
+
DELETE_PROJECT_MUTATION,
|
|
41
|
+
GET_PROJECT_BY_ID_QUERY,
|
|
42
|
+
GET_PROJECT_ITERATIONS,
|
|
43
|
+
GET_PROJECT_QUERY,
|
|
44
|
+
ISSUE_FRAGMENT,
|
|
45
|
+
LIST_PROJECTS_QUERY,
|
|
46
|
+
SEARCH_ISSUES,
|
|
47
|
+
UPDATE_PROJECT_MUTATION,
|
|
48
|
+
)
|
|
49
|
+
from .types import (
|
|
50
|
+
GitHubStateMapping,
|
|
51
|
+
extract_state_from_issue,
|
|
52
|
+
get_github_state,
|
|
53
|
+
get_priority_from_labels,
|
|
54
|
+
get_priority_label,
|
|
55
|
+
get_state_label,
|
|
56
|
+
)
|
|
57
|
+
|
|
58
|
+
logger = logging.getLogger(__name__)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class GitHubAdapter(BaseAdapter[Task]):
|
|
62
|
+
"""Adapter for GitHub Issues tracking system."""
|
|
63
|
+
|
|
64
|
+
def __init__(self, config: dict[str, Any]):
|
|
65
|
+
"""Initialize GitHub adapter.
|
|
66
|
+
|
|
67
|
+
Args:
|
|
68
|
+
----
|
|
69
|
+
config: Configuration with:
|
|
70
|
+
- token: GitHub PAT (or GITHUB_TOKEN env var)
|
|
71
|
+
- owner: Repository owner (or GITHUB_OWNER env var)
|
|
72
|
+
- repo: Repository name (or GITHUB_REPO env var)
|
|
73
|
+
- api_url: Optional API URL for GitHub Enterprise
|
|
74
|
+
- use_projects_v2: Enable Projects v2 (default: False)
|
|
75
|
+
- custom_priority_scheme: Custom priority label mapping
|
|
76
|
+
- labels_ttl: Label cache TTL in seconds (default: 300.0)
|
|
77
|
+
|
|
78
|
+
"""
|
|
79
|
+
super().__init__(config)
|
|
80
|
+
|
|
81
|
+
# Load configuration with environment variable resolution
|
|
82
|
+
full_config = load_adapter_config("github", config)
|
|
83
|
+
|
|
84
|
+
# Validate required configuration
|
|
85
|
+
missing_keys = validate_adapter_config("github", full_config)
|
|
86
|
+
if missing_keys:
|
|
87
|
+
missing = ", ".join(missing_keys)
|
|
88
|
+
raise ValueError(
|
|
89
|
+
f"GitHub adapter missing required configuration: {missing}"
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
# Get authentication token - support 'api_key' and 'token'
|
|
93
|
+
self.token = (
|
|
94
|
+
full_config.get("api_key")
|
|
95
|
+
or full_config.get("token")
|
|
96
|
+
or full_config.get("token")
|
|
97
|
+
)
|
|
98
|
+
|
|
99
|
+
# Get repository information
|
|
100
|
+
self.owner = full_config.get("owner")
|
|
101
|
+
self.repo = full_config.get("repo")
|
|
102
|
+
|
|
103
|
+
# API URLs
|
|
104
|
+
self.api_url = config.get("api_url", "https://api.github.com")
|
|
105
|
+
self.graphql_url = (
|
|
106
|
+
f"{self.api_url}/graphql"
|
|
107
|
+
if "github.com" in self.api_url
|
|
108
|
+
else f"{self.api_url}/api/graphql"
|
|
109
|
+
)
|
|
110
|
+
|
|
111
|
+
# Configuration options
|
|
112
|
+
self.use_projects_v2 = config.get("use_projects_v2", False)
|
|
113
|
+
self.custom_priority_scheme = config.get("custom_priority_scheme", {})
|
|
114
|
+
|
|
115
|
+
# Initialize GitHub API client
|
|
116
|
+
self.gh_client = GitHubClient(
|
|
117
|
+
token=self.token,
|
|
118
|
+
owner=self.owner,
|
|
119
|
+
repo=self.repo,
|
|
120
|
+
api_url=self.api_url,
|
|
121
|
+
timeout=30.0,
|
|
122
|
+
)
|
|
123
|
+
|
|
124
|
+
# Keep legacy client reference for backward compatibility
|
|
125
|
+
# TODO: Gradually migrate all direct self.client usage to self.gh_client
|
|
126
|
+
self.client = self.gh_client.client
|
|
127
|
+
self.headers = self.gh_client.headers
|
|
128
|
+
self.graphql_url = self.gh_client.graphql_url
|
|
129
|
+
|
|
130
|
+
# Initialize TTL-based cache
|
|
131
|
+
self._labels_ttl = config.get("labels_ttl", 300.0) # 5 min default
|
|
132
|
+
self._labels_cache = MemoryCache(default_ttl=self._labels_ttl)
|
|
133
|
+
self._milestones_cache: list[dict[str, Any]] | None = None
|
|
134
|
+
|
|
135
|
+
def validate_credentials(self) -> tuple[bool, str]:
|
|
136
|
+
"""Validate that required credentials are present.
|
|
137
|
+
|
|
138
|
+
Returns:
|
|
139
|
+
-------
|
|
140
|
+
(is_valid, error_message) - Tuple of validation result and error message
|
|
141
|
+
|
|
142
|
+
"""
|
|
143
|
+
if not self.token:
|
|
144
|
+
return (
|
|
145
|
+
False,
|
|
146
|
+
"GITHUB_TOKEN is required. Set it in .env.local or environment.",
|
|
147
|
+
)
|
|
148
|
+
if not self.owner:
|
|
149
|
+
return (
|
|
150
|
+
False,
|
|
151
|
+
"GitHub owner is required. Set GITHUB_OWNER in .env.local "
|
|
152
|
+
"or configure with 'mcp-ticketer init --adapter github "
|
|
153
|
+
"--github-owner <owner>'",
|
|
154
|
+
)
|
|
155
|
+
if not self.repo:
|
|
156
|
+
return (
|
|
157
|
+
False,
|
|
158
|
+
"GitHub repo is required. Set GITHUB_REPO in .env.local "
|
|
159
|
+
"or configure with 'mcp-ticketer init --adapter github "
|
|
160
|
+
"--github-repo <repo>'",
|
|
161
|
+
)
|
|
162
|
+
return True, ""
|
|
163
|
+
|
|
164
|
+
def _get_state_mapping(self) -> dict[TicketState, str]:
|
|
165
|
+
"""Map universal states to GitHub states (delegated to types module)."""
|
|
166
|
+
return {state: get_github_state(state) for state in TicketState}
|
|
167
|
+
|
|
168
|
+
def _get_state_label(self, state: TicketState) -> str | None:
|
|
169
|
+
"""Get the label name for extended states (delegated to types module)."""
|
|
170
|
+
return get_state_label(state)
|
|
171
|
+
|
|
172
|
+
def _get_priority_from_labels(self, labels: list[str]) -> Priority:
|
|
173
|
+
"""Extract priority from issue labels (delegated to types module)."""
|
|
174
|
+
return get_priority_from_labels(labels, self.custom_priority_scheme)
|
|
175
|
+
|
|
176
|
+
def _get_priority_label(self, priority: Priority) -> str:
|
|
177
|
+
"""Get label name for a priority level (delegated to types module)."""
|
|
178
|
+
return get_priority_label(priority, self.custom_priority_scheme)
|
|
179
|
+
|
|
180
|
+
def _milestone_to_epic(self, milestone: dict[str, Any]) -> Epic:
|
|
181
|
+
"""Convert GitHub milestone to Epic model (delegated to mappers module)."""
|
|
182
|
+
return map_github_milestone_to_epic(milestone)
|
|
183
|
+
|
|
184
|
+
def _extract_state_from_issue(self, issue: dict[str, Any]) -> TicketState:
|
|
185
|
+
"""Extract ticket state from GitHub issue data (delegated to types module)."""
|
|
186
|
+
return extract_state_from_issue(issue)
|
|
187
|
+
|
|
188
|
+
def _task_from_github_issue(self, issue: dict[str, Any]) -> Task:
|
|
189
|
+
"""Convert GitHub issue to universal Task (delegated to mappers module)."""
|
|
190
|
+
return map_github_issue_to_task(issue, self.custom_priority_scheme)
|
|
191
|
+
|
|
192
|
+
async def _ensure_label_exists(
|
|
193
|
+
self, label_name: str, color: str = "0366d6"
|
|
194
|
+
) -> None:
|
|
195
|
+
"""Ensure a label exists in the repository."""
|
|
196
|
+
cache_key = "github_labels"
|
|
197
|
+
cached_labels = await self._labels_cache.get(cache_key)
|
|
198
|
+
|
|
199
|
+
if cached_labels is None:
|
|
200
|
+
response = await self.client.get(f"/repos/{self.owner}/{self.repo}/labels")
|
|
201
|
+
response.raise_for_status()
|
|
202
|
+
cached_labels = response.json()
|
|
203
|
+
await self._labels_cache.set(cache_key, cached_labels)
|
|
204
|
+
|
|
205
|
+
# Check if label exists
|
|
206
|
+
existing_labels = [label["name"].lower() for label in cached_labels]
|
|
207
|
+
if label_name.lower() not in existing_labels:
|
|
208
|
+
# Create the label
|
|
209
|
+
response = await self.client.post(
|
|
210
|
+
f"/repos/{self.owner}/{self.repo}/labels",
|
|
211
|
+
json={"name": label_name, "color": color},
|
|
212
|
+
)
|
|
213
|
+
if response.status_code == 201:
|
|
214
|
+
cached_labels.append(response.json())
|
|
215
|
+
await self._labels_cache.set(cache_key, cached_labels)
|
|
216
|
+
|
|
217
|
+
async def _graphql_request(
|
|
218
|
+
self, query: str, variables: dict[str, Any]
|
|
219
|
+
) -> dict[str, Any]:
|
|
220
|
+
"""Execute a GraphQL query."""
|
|
221
|
+
response = await self.client.post(
|
|
222
|
+
self.graphql_url, json={"query": query, "variables": variables}
|
|
223
|
+
)
|
|
224
|
+
response.raise_for_status()
|
|
225
|
+
|
|
226
|
+
data = response.json()
|
|
227
|
+
if "errors" in data:
|
|
228
|
+
raise ValueError(f"GraphQL errors: {data['errors']}")
|
|
229
|
+
|
|
230
|
+
return data["data"]
|
|
231
|
+
|
|
232
|
+
async def create(self, ticket: Task) -> Task:
|
|
233
|
+
"""Create a new GitHub issue."""
|
|
234
|
+
# Validate credentials before attempting operation
|
|
235
|
+
is_valid, error_message = self.validate_credentials()
|
|
236
|
+
if not is_valid:
|
|
237
|
+
raise ValueError(error_message)
|
|
238
|
+
|
|
239
|
+
# Prepare labels
|
|
240
|
+
labels = ticket.tags.copy() if ticket.tags else []
|
|
241
|
+
|
|
242
|
+
# Add state label if needed
|
|
243
|
+
state_label = self._get_state_label(ticket.state)
|
|
244
|
+
if state_label:
|
|
245
|
+
labels.append(state_label)
|
|
246
|
+
await self._ensure_label_exists(state_label, "fbca04")
|
|
247
|
+
|
|
248
|
+
# Add priority label
|
|
249
|
+
priority_label = self._get_priority_label(ticket.priority)
|
|
250
|
+
labels.append(priority_label)
|
|
251
|
+
await self._ensure_label_exists(priority_label, "d73a4a")
|
|
252
|
+
|
|
253
|
+
# Ensure all labels exist
|
|
254
|
+
for label in labels:
|
|
255
|
+
await self._ensure_label_exists(label)
|
|
256
|
+
|
|
257
|
+
# Build issue data
|
|
258
|
+
issue_data = {
|
|
259
|
+
"title": ticket.title,
|
|
260
|
+
"body": ticket.description or "",
|
|
261
|
+
"labels": labels,
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
# Add assignee if specified
|
|
265
|
+
if ticket.assignee:
|
|
266
|
+
issue_data["assignees"] = [ticket.assignee]
|
|
267
|
+
|
|
268
|
+
# Add milestone if parent_epic is specified
|
|
269
|
+
if ticket.parent_epic:
|
|
270
|
+
try:
|
|
271
|
+
milestone_number = int(ticket.parent_epic)
|
|
272
|
+
issue_data["milestone"] = milestone_number
|
|
273
|
+
except ValueError:
|
|
274
|
+
# Try to find milestone by title
|
|
275
|
+
if not self._milestones_cache:
|
|
276
|
+
response = await self.client.get(
|
|
277
|
+
f"/repos/{self.owner}/{self.repo}/milestones"
|
|
278
|
+
)
|
|
279
|
+
response.raise_for_status()
|
|
280
|
+
self._milestones_cache = response.json()
|
|
281
|
+
|
|
282
|
+
for milestone in self._milestones_cache:
|
|
283
|
+
if milestone["title"] == ticket.parent_epic:
|
|
284
|
+
issue_data["milestone"] = milestone["number"]
|
|
285
|
+
break
|
|
286
|
+
|
|
287
|
+
# Create the issue
|
|
288
|
+
response = await self.client.post(
|
|
289
|
+
f"/repos/{self.owner}/{self.repo}/issues", json=issue_data
|
|
290
|
+
)
|
|
291
|
+
response.raise_for_status()
|
|
292
|
+
|
|
293
|
+
created_issue = response.json()
|
|
294
|
+
|
|
295
|
+
# If state requires closing, close the issue
|
|
296
|
+
if ticket.state in [TicketState.DONE, TicketState.CLOSED]:
|
|
297
|
+
await self.client.patch(
|
|
298
|
+
f"/repos/{self.owner}/{self.repo}/issues/{created_issue['number']}",
|
|
299
|
+
json={"state": "closed"},
|
|
300
|
+
)
|
|
301
|
+
created_issue["state"] = "closed"
|
|
302
|
+
|
|
303
|
+
return self._task_from_github_issue(created_issue)
|
|
304
|
+
|
|
305
|
+
async def read(self, ticket_id: str) -> Task | Epic | None:
|
|
306
|
+
"""Read a GitHub issue OR milestone by number with unified find.
|
|
307
|
+
|
|
308
|
+
Tries to find the entity in the following order:
|
|
309
|
+
1. Issue (most common case) - returns Task
|
|
310
|
+
2. Milestone (project/epic) - returns Epic
|
|
311
|
+
|
|
312
|
+
Args:
|
|
313
|
+
----
|
|
314
|
+
ticket_id: GitHub issue number or milestone number (as string)
|
|
315
|
+
|
|
316
|
+
Returns:
|
|
317
|
+
-------
|
|
318
|
+
Task if issue found,
|
|
319
|
+
Epic if milestone found,
|
|
320
|
+
None if not found as either type
|
|
321
|
+
|
|
322
|
+
Examples:
|
|
323
|
+
--------
|
|
324
|
+
>>> # Read issue #123
|
|
325
|
+
>>> task = await adapter.read("123")
|
|
326
|
+
>>> isinstance(task, Task) # True
|
|
327
|
+
>>>
|
|
328
|
+
>>> # Read milestone #5
|
|
329
|
+
>>> epic = await adapter.read("5")
|
|
330
|
+
>>> isinstance(epic, Epic) # True (if 5 is milestone, not issue)
|
|
331
|
+
|
|
332
|
+
"""
|
|
333
|
+
# Validate credentials before attempting operation
|
|
334
|
+
is_valid, error_message = self.validate_credentials()
|
|
335
|
+
if not is_valid:
|
|
336
|
+
raise ValueError(error_message)
|
|
337
|
+
|
|
338
|
+
try:
|
|
339
|
+
entity_number = int(ticket_id)
|
|
340
|
+
except ValueError:
|
|
341
|
+
return None
|
|
342
|
+
|
|
343
|
+
# Try reading as Issue first (most common case)
|
|
344
|
+
try:
|
|
345
|
+
response = await self.client.get(
|
|
346
|
+
f"/repos/{self.owner}/{self.repo}/issues/{entity_number}"
|
|
347
|
+
)
|
|
348
|
+
if response.status_code == 200:
|
|
349
|
+
response.raise_for_status()
|
|
350
|
+
issue = response.json()
|
|
351
|
+
logger.debug(f"Found GitHub entity as Issue: {ticket_id}")
|
|
352
|
+
return self._task_from_github_issue(issue)
|
|
353
|
+
elif response.status_code == 404:
|
|
354
|
+
# Not found as issue, will try milestone next
|
|
355
|
+
logger.debug(f"Not found as Issue ({ticket_id}), trying Milestone")
|
|
356
|
+
except httpx.HTTPError as e:
|
|
357
|
+
logger.debug(f"Error reading as Issue ({ticket_id}): {e}")
|
|
358
|
+
|
|
359
|
+
# Try reading as Milestone (Epic)
|
|
360
|
+
try:
|
|
361
|
+
milestone = await self.get_milestone(entity_number)
|
|
362
|
+
if milestone:
|
|
363
|
+
logger.debug(f"Found GitHub entity as Milestone: {ticket_id}")
|
|
364
|
+
return milestone
|
|
365
|
+
except Exception as e:
|
|
366
|
+
logger.debug(f"Error reading as Milestone ({ticket_id}): {e}")
|
|
367
|
+
|
|
368
|
+
# Not found as either Issue or Milestone
|
|
369
|
+
logger.warning(f"GitHub entity not found: {ticket_id}")
|
|
370
|
+
return None
|
|
371
|
+
|
|
372
|
+
async def update(self, ticket_id: str, updates: dict[str, Any]) -> Task | None:
|
|
373
|
+
"""Update a GitHub issue."""
|
|
374
|
+
# Validate credentials before attempting operation
|
|
375
|
+
is_valid, error_message = self.validate_credentials()
|
|
376
|
+
if not is_valid:
|
|
377
|
+
raise ValueError(error_message)
|
|
378
|
+
|
|
379
|
+
try:
|
|
380
|
+
issue_number = int(ticket_id)
|
|
381
|
+
except ValueError:
|
|
382
|
+
return None
|
|
383
|
+
|
|
384
|
+
# Get current issue to preserve labels
|
|
385
|
+
response = await self.client.get(
|
|
386
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}"
|
|
387
|
+
)
|
|
388
|
+
if response.status_code == 404:
|
|
389
|
+
return None
|
|
390
|
+
response.raise_for_status()
|
|
391
|
+
|
|
392
|
+
current_issue = response.json()
|
|
393
|
+
current_labels = [label["name"] for label in current_issue.get("labels", [])]
|
|
394
|
+
|
|
395
|
+
# Build update data
|
|
396
|
+
update_data = {}
|
|
397
|
+
|
|
398
|
+
if "title" in updates:
|
|
399
|
+
update_data["title"] = updates["title"]
|
|
400
|
+
|
|
401
|
+
if "description" in updates:
|
|
402
|
+
update_data["body"] = updates["description"]
|
|
403
|
+
|
|
404
|
+
# Handle state updates
|
|
405
|
+
if "state" in updates:
|
|
406
|
+
new_state = updates["state"]
|
|
407
|
+
if isinstance(new_state, str):
|
|
408
|
+
new_state = TicketState(new_state)
|
|
409
|
+
|
|
410
|
+
# Remove old state labels
|
|
411
|
+
labels_to_update = [
|
|
412
|
+
label
|
|
413
|
+
for label in current_labels
|
|
414
|
+
if label.lower()
|
|
415
|
+
not in [sl.lower() for sl in GitHubStateMapping.STATE_LABELS.values()]
|
|
416
|
+
]
|
|
417
|
+
|
|
418
|
+
# Add new state label if needed
|
|
419
|
+
state_label = self._get_state_label(new_state)
|
|
420
|
+
if state_label:
|
|
421
|
+
await self._ensure_label_exists(state_label, "fbca04")
|
|
422
|
+
labels_to_update.append(state_label)
|
|
423
|
+
|
|
424
|
+
update_data["labels"] = labels_to_update
|
|
425
|
+
|
|
426
|
+
# Update issue state if needed
|
|
427
|
+
if new_state in [TicketState.DONE, TicketState.CLOSED]:
|
|
428
|
+
update_data["state"] = "closed"
|
|
429
|
+
else:
|
|
430
|
+
update_data["state"] = "open"
|
|
431
|
+
|
|
432
|
+
# Handle priority updates
|
|
433
|
+
if "priority" in updates:
|
|
434
|
+
new_priority = updates["priority"]
|
|
435
|
+
if isinstance(new_priority, str):
|
|
436
|
+
new_priority = Priority(new_priority)
|
|
437
|
+
|
|
438
|
+
# Remove old priority labels
|
|
439
|
+
labels_to_update = update_data.get("labels", current_labels)
|
|
440
|
+
all_priority_labels = []
|
|
441
|
+
for labels in GitHubStateMapping.PRIORITY_LABELS.values():
|
|
442
|
+
all_priority_labels.extend([label.lower() for label in labels])
|
|
443
|
+
|
|
444
|
+
labels_to_update = [
|
|
445
|
+
label
|
|
446
|
+
for label in labels_to_update
|
|
447
|
+
if label.lower() not in all_priority_labels
|
|
448
|
+
and not re.match(r"^P[0-3]$", label, re.IGNORECASE)
|
|
449
|
+
]
|
|
450
|
+
|
|
451
|
+
# Add new priority label
|
|
452
|
+
priority_label = self._get_priority_label(new_priority)
|
|
453
|
+
await self._ensure_label_exists(priority_label, "d73a4a")
|
|
454
|
+
labels_to_update.append(priority_label)
|
|
455
|
+
|
|
456
|
+
update_data["labels"] = labels_to_update
|
|
457
|
+
|
|
458
|
+
# Handle assignee updates
|
|
459
|
+
if "assignee" in updates:
|
|
460
|
+
if updates["assignee"]:
|
|
461
|
+
update_data["assignees"] = [updates["assignee"]]
|
|
462
|
+
else:
|
|
463
|
+
update_data["assignees"] = []
|
|
464
|
+
|
|
465
|
+
# Handle tags updates
|
|
466
|
+
if "tags" in updates:
|
|
467
|
+
# Preserve state and priority labels
|
|
468
|
+
preserved_labels = []
|
|
469
|
+
for label in current_labels:
|
|
470
|
+
if label.lower() in [
|
|
471
|
+
sl.lower() for sl in GitHubStateMapping.STATE_LABELS.values()
|
|
472
|
+
]:
|
|
473
|
+
preserved_labels.append(label)
|
|
474
|
+
elif any(
|
|
475
|
+
label.lower() in [pl.lower() for pl in labels]
|
|
476
|
+
for labels in GitHubStateMapping.PRIORITY_LABELS.values()
|
|
477
|
+
):
|
|
478
|
+
preserved_labels.append(label)
|
|
479
|
+
elif re.match(r"^P[0-3]$", label, re.IGNORECASE):
|
|
480
|
+
preserved_labels.append(label)
|
|
481
|
+
|
|
482
|
+
# Add new tags
|
|
483
|
+
for tag in updates["tags"]:
|
|
484
|
+
await self._ensure_label_exists(tag)
|
|
485
|
+
|
|
486
|
+
update_data["labels"] = preserved_labels + updates["tags"]
|
|
487
|
+
|
|
488
|
+
# Apply updates
|
|
489
|
+
if update_data:
|
|
490
|
+
response = await self.client.patch(
|
|
491
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}",
|
|
492
|
+
json=update_data,
|
|
493
|
+
)
|
|
494
|
+
response.raise_for_status()
|
|
495
|
+
|
|
496
|
+
updated_issue = response.json()
|
|
497
|
+
return self._task_from_github_issue(updated_issue)
|
|
498
|
+
|
|
499
|
+
return await self.read(ticket_id)
|
|
500
|
+
|
|
501
|
+
async def delete(self, ticket_id: str) -> bool:
|
|
502
|
+
"""Delete (close) a GitHub issue."""
|
|
503
|
+
# Validate credentials before attempting operation
|
|
504
|
+
is_valid, error_message = self.validate_credentials()
|
|
505
|
+
if not is_valid:
|
|
506
|
+
raise ValueError(error_message)
|
|
507
|
+
|
|
508
|
+
try:
|
|
509
|
+
issue_number = int(ticket_id)
|
|
510
|
+
except ValueError:
|
|
511
|
+
return False
|
|
512
|
+
|
|
513
|
+
try:
|
|
514
|
+
response = await self.client.patch(
|
|
515
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}",
|
|
516
|
+
json={"state": "closed", "state_reason": "not_planned"},
|
|
517
|
+
)
|
|
518
|
+
response.raise_for_status()
|
|
519
|
+
return True
|
|
520
|
+
except httpx.HTTPError:
|
|
521
|
+
return False
|
|
522
|
+
|
|
523
|
+
async def list(
|
|
524
|
+
self, limit: int = 10, offset: int = 0, filters: dict[str, Any] | None = None
|
|
525
|
+
) -> list[Task]:
|
|
526
|
+
"""List GitHub issues with filters."""
|
|
527
|
+
# Build query parameters
|
|
528
|
+
params: dict[str, Any] = {
|
|
529
|
+
"per_page": min(limit, 100), # GitHub max is 100
|
|
530
|
+
"page": (offset // limit) + 1 if limit > 0 else 1,
|
|
531
|
+
}
|
|
532
|
+
|
|
533
|
+
if filters:
|
|
534
|
+
# State filter
|
|
535
|
+
if "state" in filters:
|
|
536
|
+
state = filters["state"]
|
|
537
|
+
if isinstance(state, str):
|
|
538
|
+
state = TicketState(state)
|
|
539
|
+
|
|
540
|
+
if state in [TicketState.DONE, TicketState.CLOSED]:
|
|
541
|
+
params["state"] = "closed"
|
|
542
|
+
else:
|
|
543
|
+
params["state"] = "open"
|
|
544
|
+
# Add label filter for extended states
|
|
545
|
+
state_label = self._get_state_label(state)
|
|
546
|
+
if state_label:
|
|
547
|
+
params["labels"] = state_label
|
|
548
|
+
|
|
549
|
+
# Priority filter via labels
|
|
550
|
+
if "priority" in filters:
|
|
551
|
+
priority = filters["priority"]
|
|
552
|
+
if isinstance(priority, str):
|
|
553
|
+
priority = Priority(priority)
|
|
554
|
+
priority_label = self._get_priority_label(priority)
|
|
555
|
+
|
|
556
|
+
if "labels" in params:
|
|
557
|
+
params["labels"] += f",{priority_label}"
|
|
558
|
+
else:
|
|
559
|
+
params["labels"] = priority_label
|
|
560
|
+
|
|
561
|
+
# Assignee filter
|
|
562
|
+
if "assignee" in filters:
|
|
563
|
+
params["assignee"] = filters["assignee"]
|
|
564
|
+
|
|
565
|
+
# Milestone filter (parent_epic)
|
|
566
|
+
if "parent_epic" in filters:
|
|
567
|
+
params["milestone"] = filters["parent_epic"]
|
|
568
|
+
|
|
569
|
+
response = await self.client.get(
|
|
570
|
+
f"/repos/{self.owner}/{self.repo}/issues", params=params
|
|
571
|
+
)
|
|
572
|
+
response.raise_for_status()
|
|
573
|
+
|
|
574
|
+
issues = response.json()
|
|
575
|
+
|
|
576
|
+
# Store rate limit info
|
|
577
|
+
self._rate_limit = {
|
|
578
|
+
"limit": response.headers.get("X-RateLimit-Limit"),
|
|
579
|
+
"remaining": response.headers.get("X-RateLimit-Remaining"),
|
|
580
|
+
"reset": response.headers.get("X-RateLimit-Reset"),
|
|
581
|
+
}
|
|
582
|
+
|
|
583
|
+
# Filter out pull requests (they appear as issues in the API)
|
|
584
|
+
issues = [issue for issue in issues if "pull_request" not in issue]
|
|
585
|
+
|
|
586
|
+
return [self._task_from_github_issue(issue) for issue in issues]
|
|
587
|
+
|
|
588
|
+
async def search(self, query: SearchQuery) -> builtins.list[Task]:
|
|
589
|
+
"""Search GitHub issues using advanced search syntax."""
|
|
590
|
+
# Build GitHub search query
|
|
591
|
+
search_parts = [f"repo:{self.owner}/{self.repo}", "is:issue"]
|
|
592
|
+
|
|
593
|
+
# Text search
|
|
594
|
+
if query.query:
|
|
595
|
+
# Escape special characters for GitHub search
|
|
596
|
+
escaped_query = query.query.replace('"', '\\"')
|
|
597
|
+
search_parts.append(f'"{escaped_query}"')
|
|
598
|
+
|
|
599
|
+
# State filter
|
|
600
|
+
if query.state:
|
|
601
|
+
if query.state in [TicketState.DONE, TicketState.CLOSED]:
|
|
602
|
+
search_parts.append("is:closed")
|
|
603
|
+
else:
|
|
604
|
+
search_parts.append("is:open")
|
|
605
|
+
# Add label filter for extended states
|
|
606
|
+
state_label = self._get_state_label(query.state)
|
|
607
|
+
if state_label:
|
|
608
|
+
search_parts.append(f'label:"{state_label}"')
|
|
609
|
+
|
|
610
|
+
# Priority filter
|
|
611
|
+
if query.priority:
|
|
612
|
+
priority_label = self._get_priority_label(query.priority)
|
|
613
|
+
search_parts.append(f'label:"{priority_label}"')
|
|
614
|
+
|
|
615
|
+
# Assignee filter
|
|
616
|
+
if query.assignee:
|
|
617
|
+
search_parts.append(f"assignee:{query.assignee}")
|
|
618
|
+
|
|
619
|
+
# Tags filter
|
|
620
|
+
if query.tags:
|
|
621
|
+
for tag in query.tags:
|
|
622
|
+
search_parts.append(f'label:"{tag}"')
|
|
623
|
+
|
|
624
|
+
# Build final search query
|
|
625
|
+
github_query = " ".join(search_parts)
|
|
626
|
+
|
|
627
|
+
# Use GraphQL for better search capabilities
|
|
628
|
+
full_query = ISSUE_FRAGMENT + SEARCH_ISSUES
|
|
629
|
+
|
|
630
|
+
variables = {
|
|
631
|
+
"query": github_query,
|
|
632
|
+
"first": min(query.limit, 100),
|
|
633
|
+
"after": None,
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
# Handle pagination for offset
|
|
637
|
+
if query.offset > 0:
|
|
638
|
+
# We need to paginate through to get to the offset
|
|
639
|
+
# This is inefficient but GitHub doesn't support direct offset
|
|
640
|
+
pages_to_skip = query.offset // 100
|
|
641
|
+
for _ in range(pages_to_skip):
|
|
642
|
+
temp_result = await self._graphql_request(full_query, variables)
|
|
643
|
+
page_info = temp_result["search"]["pageInfo"]
|
|
644
|
+
if page_info["hasNextPage"]:
|
|
645
|
+
variables["after"] = page_info["endCursor"]
|
|
646
|
+
else:
|
|
647
|
+
return [] # Offset beyond available results
|
|
648
|
+
|
|
649
|
+
result = await self._graphql_request(full_query, variables)
|
|
650
|
+
|
|
651
|
+
issues = []
|
|
652
|
+
for node in result["search"]["nodes"]:
|
|
653
|
+
if node: # Some nodes might be null
|
|
654
|
+
# Convert GraphQL format to REST format for consistency
|
|
655
|
+
rest_format = {
|
|
656
|
+
"number": node["number"],
|
|
657
|
+
"title": node["title"],
|
|
658
|
+
"body": node["body"],
|
|
659
|
+
"state": node["state"].lower(),
|
|
660
|
+
"created_at": node["createdAt"],
|
|
661
|
+
"updated_at": node["updatedAt"],
|
|
662
|
+
"html_url": node["url"],
|
|
663
|
+
"labels": node.get("labels", {}).get("nodes", []),
|
|
664
|
+
"milestone": node.get("milestone"),
|
|
665
|
+
"assignees": node.get("assignees", {}).get("nodes", []),
|
|
666
|
+
"author": node.get("author"),
|
|
667
|
+
}
|
|
668
|
+
issues.append(self._task_from_github_issue(rest_format))
|
|
669
|
+
|
|
670
|
+
return issues
|
|
671
|
+
|
|
672
|
+
async def transition_state(
|
|
673
|
+
self, ticket_id: str, target_state: TicketState
|
|
674
|
+
) -> Task | None:
|
|
675
|
+
"""Transition GitHub issue to a new state."""
|
|
676
|
+
# Validate transition
|
|
677
|
+
if not await self.validate_transition(ticket_id, target_state):
|
|
678
|
+
return None
|
|
679
|
+
|
|
680
|
+
# Update state
|
|
681
|
+
return await self.update(ticket_id, {"state": target_state})
|
|
682
|
+
|
|
683
|
+
async def add_comment(self, comment: Comment) -> Comment:
|
|
684
|
+
"""Add a comment to a GitHub issue."""
|
|
685
|
+
try:
|
|
686
|
+
issue_number = int(comment.ticket_id)
|
|
687
|
+
except ValueError as e:
|
|
688
|
+
raise ValueError(f"Invalid issue number: {comment.ticket_id}") from e
|
|
689
|
+
|
|
690
|
+
# Create comment
|
|
691
|
+
response = await self.client.post(
|
|
692
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}/comments",
|
|
693
|
+
json={"body": comment.content},
|
|
694
|
+
)
|
|
695
|
+
response.raise_for_status()
|
|
696
|
+
|
|
697
|
+
created_comment = response.json()
|
|
698
|
+
|
|
699
|
+
return Comment(
|
|
700
|
+
id=str(created_comment["id"]),
|
|
701
|
+
ticket_id=comment.ticket_id,
|
|
702
|
+
author=created_comment["user"]["login"],
|
|
703
|
+
content=created_comment["body"],
|
|
704
|
+
created_at=datetime.fromisoformat(
|
|
705
|
+
created_comment["created_at"].replace("Z", "+00:00")
|
|
706
|
+
),
|
|
707
|
+
metadata={
|
|
708
|
+
"github": {
|
|
709
|
+
"id": created_comment["id"],
|
|
710
|
+
"url": created_comment["html_url"],
|
|
711
|
+
"author_avatar": created_comment["user"]["avatar_url"],
|
|
712
|
+
}
|
|
713
|
+
},
|
|
714
|
+
)
|
|
715
|
+
|
|
716
|
+
async def get_comments(
|
|
717
|
+
self, ticket_id: str, limit: int = 10, offset: int = 0
|
|
718
|
+
) -> builtins.list[Comment]:
|
|
719
|
+
"""Get comments for a GitHub issue."""
|
|
720
|
+
try:
|
|
721
|
+
issue_number = int(ticket_id)
|
|
722
|
+
except ValueError:
|
|
723
|
+
return []
|
|
724
|
+
|
|
725
|
+
params = {
|
|
726
|
+
"per_page": min(limit, 100),
|
|
727
|
+
"page": (offset // limit) + 1 if limit > 0 else 1,
|
|
728
|
+
}
|
|
729
|
+
|
|
730
|
+
try:
|
|
731
|
+
response = await self.client.get(
|
|
732
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}/comments",
|
|
733
|
+
params=params,
|
|
734
|
+
)
|
|
735
|
+
response.raise_for_status()
|
|
736
|
+
|
|
737
|
+
comments = []
|
|
738
|
+
for comment_data in response.json():
|
|
739
|
+
comments.append(
|
|
740
|
+
Comment(
|
|
741
|
+
id=str(comment_data["id"]),
|
|
742
|
+
ticket_id=ticket_id,
|
|
743
|
+
author=comment_data["user"]["login"],
|
|
744
|
+
content=comment_data["body"],
|
|
745
|
+
created_at=datetime.fromisoformat(
|
|
746
|
+
comment_data["created_at"].replace("Z", "+00:00")
|
|
747
|
+
),
|
|
748
|
+
metadata={
|
|
749
|
+
"github": {
|
|
750
|
+
"id": comment_data["id"],
|
|
751
|
+
"url": comment_data["html_url"],
|
|
752
|
+
"author_avatar": comment_data["user"]["avatar_url"],
|
|
753
|
+
}
|
|
754
|
+
},
|
|
755
|
+
)
|
|
756
|
+
)
|
|
757
|
+
|
|
758
|
+
return comments
|
|
759
|
+
except httpx.HTTPError:
|
|
760
|
+
return []
|
|
761
|
+
|
|
762
|
+
async def get_rate_limit(self) -> dict[str, Any]:
|
|
763
|
+
"""Get current rate limit status."""
|
|
764
|
+
response = await self.client.get("/rate_limit")
|
|
765
|
+
response.raise_for_status()
|
|
766
|
+
return response.json()
|
|
767
|
+
|
|
768
|
+
async def create_milestone(self, epic: Epic) -> Epic:
|
|
769
|
+
"""Create a GitHub milestone as an Epic."""
|
|
770
|
+
milestone_data = {
|
|
771
|
+
"title": epic.title,
|
|
772
|
+
"description": epic.description or "",
|
|
773
|
+
"state": "open" if epic.state != TicketState.CLOSED else "closed",
|
|
774
|
+
}
|
|
775
|
+
|
|
776
|
+
response = await self.client.post(
|
|
777
|
+
f"/repos/{self.owner}/{self.repo}/milestones", json=milestone_data
|
|
778
|
+
)
|
|
779
|
+
response.raise_for_status()
|
|
780
|
+
|
|
781
|
+
created_milestone = response.json()
|
|
782
|
+
return self._milestone_to_epic(created_milestone)
|
|
783
|
+
|
|
784
|
+
async def get_milestone(self, milestone_number: int) -> Epic | None:
|
|
785
|
+
"""Get a GitHub milestone as an Epic."""
|
|
786
|
+
try:
|
|
787
|
+
response = await self.client.get(
|
|
788
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_number}"
|
|
789
|
+
)
|
|
790
|
+
if response.status_code == 404:
|
|
791
|
+
return None
|
|
792
|
+
response.raise_for_status()
|
|
793
|
+
|
|
794
|
+
milestone = response.json()
|
|
795
|
+
return self._milestone_to_epic(milestone)
|
|
796
|
+
except httpx.HTTPError:
|
|
797
|
+
return None
|
|
798
|
+
|
|
799
|
+
async def list_milestones(
|
|
800
|
+
self, state: str = "open", limit: int = 10, offset: int = 0
|
|
801
|
+
) -> builtins.list[Epic]:
|
|
802
|
+
"""List GitHub milestones as Epics."""
|
|
803
|
+
params = {
|
|
804
|
+
"state": state,
|
|
805
|
+
"per_page": min(limit, 100),
|
|
806
|
+
"page": (offset // limit) + 1 if limit > 0 else 1,
|
|
807
|
+
}
|
|
808
|
+
|
|
809
|
+
response = await self.client.get(
|
|
810
|
+
f"/repos/{self.owner}/{self.repo}/milestones", params=params
|
|
811
|
+
)
|
|
812
|
+
response.raise_for_status()
|
|
813
|
+
|
|
814
|
+
return [self._milestone_to_epic(milestone) for milestone in response.json()]
|
|
815
|
+
|
|
816
|
+
async def delete_epic(self, epic_id: str) -> bool:
|
|
817
|
+
"""Delete a GitHub milestone (Epic).
|
|
818
|
+
|
|
819
|
+
Args:
|
|
820
|
+
----
|
|
821
|
+
epic_id: Milestone number (not ID) as a string
|
|
822
|
+
|
|
823
|
+
Returns:
|
|
824
|
+
-------
|
|
825
|
+
True if successfully deleted, False otherwise
|
|
826
|
+
|
|
827
|
+
Raises:
|
|
828
|
+
------
|
|
829
|
+
ValueError: If credentials are invalid or epic_id is not a valid number
|
|
830
|
+
|
|
831
|
+
"""
|
|
832
|
+
# Validate credentials
|
|
833
|
+
is_valid, error_message = self.validate_credentials()
|
|
834
|
+
if not is_valid:
|
|
835
|
+
raise ValueError(error_message)
|
|
836
|
+
|
|
837
|
+
try:
|
|
838
|
+
# Extract milestone number from epic_id
|
|
839
|
+
milestone_number = int(epic_id)
|
|
840
|
+
except ValueError as e:
|
|
841
|
+
raise ValueError(
|
|
842
|
+
f"Invalid milestone number '{epic_id}'. GitHub milestones use numeric IDs."
|
|
843
|
+
) from e
|
|
844
|
+
|
|
845
|
+
try:
|
|
846
|
+
# Delete milestone using REST API
|
|
847
|
+
response = await self.client.delete(
|
|
848
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_number}"
|
|
849
|
+
)
|
|
850
|
+
|
|
851
|
+
# GitHub returns 204 No Content on successful deletion
|
|
852
|
+
if response.status_code == 204:
|
|
853
|
+
return True
|
|
854
|
+
|
|
855
|
+
# Handle 404 errors gracefully
|
|
856
|
+
if response.status_code == 404:
|
|
857
|
+
return False
|
|
858
|
+
|
|
859
|
+
# Other errors - raise for visibility
|
|
860
|
+
response.raise_for_status()
|
|
861
|
+
return True
|
|
862
|
+
|
|
863
|
+
except httpx.HTTPStatusError as e:
|
|
864
|
+
if e.response.status_code == 404:
|
|
865
|
+
# Milestone not found
|
|
866
|
+
return False
|
|
867
|
+
# Re-raise other HTTP errors
|
|
868
|
+
raise ValueError(f"Failed to delete milestone: {e}") from e
|
|
869
|
+
except Exception as e:
|
|
870
|
+
raise ValueError(f"Failed to delete milestone: {e}") from e
|
|
871
|
+
|
|
872
|
+
async def link_to_pull_request(self, issue_number: int, pr_number: int) -> bool:
|
|
873
|
+
"""Link an issue to a pull request using keywords."""
|
|
874
|
+
# This is typically done through PR description keywords like "fixes #123"
|
|
875
|
+
# We can add a comment to track the link
|
|
876
|
+
comment = f"Linked to PR #{pr_number}"
|
|
877
|
+
|
|
878
|
+
response = await self.client.post(
|
|
879
|
+
f"/repos/{self.owner}/{self.repo}/issues/{issue_number}/comments",
|
|
880
|
+
json={"body": comment},
|
|
881
|
+
)
|
|
882
|
+
|
|
883
|
+
return response.status_code == 201
|
|
884
|
+
|
|
885
|
+
async def create_pull_request(
|
|
886
|
+
self,
|
|
887
|
+
ticket_id: str,
|
|
888
|
+
base_branch: str = "main",
|
|
889
|
+
head_branch: str | None = None,
|
|
890
|
+
title: str | None = None,
|
|
891
|
+
body: str | None = None,
|
|
892
|
+
draft: bool = False,
|
|
893
|
+
) -> dict[str, Any]:
|
|
894
|
+
"""Create a pull request linked to an issue.
|
|
895
|
+
|
|
896
|
+
Args:
|
|
897
|
+
----
|
|
898
|
+
ticket_id: Issue number to link the PR to
|
|
899
|
+
base_branch: Target branch for the PR (default: main)
|
|
900
|
+
head_branch: Source branch name (auto-generated if not provided)
|
|
901
|
+
title: PR title (uses ticket title if not provided)
|
|
902
|
+
body: PR description (auto-generated with issue link if not provided)
|
|
903
|
+
draft: Create as draft PR
|
|
904
|
+
|
|
905
|
+
Returns:
|
|
906
|
+
-------
|
|
907
|
+
Dictionary with PR details including number, url, and branch
|
|
908
|
+
|
|
909
|
+
"""
|
|
910
|
+
try:
|
|
911
|
+
issue_number = int(ticket_id)
|
|
912
|
+
except ValueError as e:
|
|
913
|
+
raise ValueError(f"Invalid issue number: {ticket_id}") from e
|
|
914
|
+
|
|
915
|
+
# Get the issue details
|
|
916
|
+
issue = await self.read(ticket_id)
|
|
917
|
+
if not issue:
|
|
918
|
+
raise ValueError(f"Issue #{ticket_id} not found")
|
|
919
|
+
|
|
920
|
+
# Auto-generate branch name if not provided
|
|
921
|
+
if not head_branch:
|
|
922
|
+
# Create branch name from issue number and title
|
|
923
|
+
# e.g., "123-fix-authentication-bug"
|
|
924
|
+
safe_title = "-".join(
|
|
925
|
+
issue.title.lower()
|
|
926
|
+
.replace("[", "")
|
|
927
|
+
.replace("]", "")
|
|
928
|
+
.replace("#", "")
|
|
929
|
+
.replace("/", "-")
|
|
930
|
+
.replace("\\", "-")
|
|
931
|
+
.split()[:5] # Limit to 5 words
|
|
932
|
+
)
|
|
933
|
+
head_branch = f"{issue_number}-{safe_title}"
|
|
934
|
+
|
|
935
|
+
# Auto-generate title if not provided
|
|
936
|
+
if not title:
|
|
937
|
+
# Include issue number in PR title
|
|
938
|
+
title = f"[#{issue_number}] {issue.title}"
|
|
939
|
+
|
|
940
|
+
# Auto-generate body if not provided
|
|
941
|
+
if not body:
|
|
942
|
+
body = f"""## Summary
|
|
943
|
+
|
|
944
|
+
This PR addresses issue #{issue_number}.
|
|
945
|
+
|
|
946
|
+
**Issue:** #{issue_number} - {issue.title}
|
|
947
|
+
**Link:** {issue.metadata.get('github', {}).get('url', '')}
|
|
948
|
+
|
|
949
|
+
## Description
|
|
950
|
+
|
|
951
|
+
{issue.description or 'No description provided.'}
|
|
952
|
+
|
|
953
|
+
## Changes
|
|
954
|
+
|
|
955
|
+
- [ ] Implementation details to be added
|
|
956
|
+
|
|
957
|
+
## Testing
|
|
958
|
+
|
|
959
|
+
- [ ] Tests have been added/updated
|
|
960
|
+
- [ ] All tests pass
|
|
961
|
+
|
|
962
|
+
## Checklist
|
|
963
|
+
|
|
964
|
+
- [ ] Code follows project style guidelines
|
|
965
|
+
- [ ] Self-review completed
|
|
966
|
+
- [ ] Documentation updated if needed
|
|
967
|
+
|
|
968
|
+
Fixes #{issue_number}
|
|
969
|
+
"""
|
|
970
|
+
|
|
971
|
+
# Check if the head branch exists
|
|
972
|
+
try:
|
|
973
|
+
branch_response = await self.client.get(
|
|
974
|
+
f"/repos/{self.owner}/{self.repo}/branches/{head_branch}"
|
|
975
|
+
)
|
|
976
|
+
branch_exists = branch_response.status_code == 200
|
|
977
|
+
except httpx.HTTPError:
|
|
978
|
+
branch_exists = False
|
|
979
|
+
|
|
980
|
+
if not branch_exists:
|
|
981
|
+
# Get the base branch SHA
|
|
982
|
+
base_response = await self.client.get(
|
|
983
|
+
f"/repos/{self.owner}/{self.repo}/branches/{base_branch}"
|
|
984
|
+
)
|
|
985
|
+
base_response.raise_for_status()
|
|
986
|
+
base_sha = base_response.json()["commit"]["sha"]
|
|
987
|
+
|
|
988
|
+
# Create the new branch
|
|
989
|
+
ref_response = await self.client.post(
|
|
990
|
+
f"/repos/{self.owner}/{self.repo}/git/refs",
|
|
991
|
+
json={
|
|
992
|
+
"ref": f"refs/heads/{head_branch}",
|
|
993
|
+
"sha": base_sha,
|
|
994
|
+
},
|
|
995
|
+
)
|
|
996
|
+
|
|
997
|
+
if ref_response.status_code != 201:
|
|
998
|
+
# Branch might already exist on remote, try to use it
|
|
999
|
+
pass
|
|
1000
|
+
|
|
1001
|
+
# Create the pull request
|
|
1002
|
+
pr_data = {
|
|
1003
|
+
"title": title,
|
|
1004
|
+
"body": body,
|
|
1005
|
+
"head": head_branch,
|
|
1006
|
+
"base": base_branch,
|
|
1007
|
+
"draft": draft,
|
|
1008
|
+
}
|
|
1009
|
+
|
|
1010
|
+
pr_response = await self.client.post(
|
|
1011
|
+
f"/repos/{self.owner}/{self.repo}/pulls", json=pr_data
|
|
1012
|
+
)
|
|
1013
|
+
|
|
1014
|
+
if pr_response.status_code == 422:
|
|
1015
|
+
# PR might already exist, try to get it
|
|
1016
|
+
search_response = await self.client.get(
|
|
1017
|
+
f"/repos/{self.owner}/{self.repo}/pulls",
|
|
1018
|
+
params={
|
|
1019
|
+
"head": f"{self.owner}:{head_branch}",
|
|
1020
|
+
"base": base_branch,
|
|
1021
|
+
"state": "open",
|
|
1022
|
+
},
|
|
1023
|
+
)
|
|
1024
|
+
|
|
1025
|
+
if search_response.status_code == 200:
|
|
1026
|
+
existing_prs = search_response.json()
|
|
1027
|
+
if existing_prs:
|
|
1028
|
+
pr = existing_prs[0]
|
|
1029
|
+
return {
|
|
1030
|
+
"number": pr["number"],
|
|
1031
|
+
"url": pr["html_url"],
|
|
1032
|
+
"api_url": pr["url"],
|
|
1033
|
+
"branch": head_branch,
|
|
1034
|
+
"state": pr["state"],
|
|
1035
|
+
"draft": pr.get("draft", False),
|
|
1036
|
+
"title": pr["title"],
|
|
1037
|
+
"existing": True,
|
|
1038
|
+
"linked_issue": issue_number,
|
|
1039
|
+
}
|
|
1040
|
+
|
|
1041
|
+
raise ValueError(f"Failed to create PR: {pr_response.text}")
|
|
1042
|
+
|
|
1043
|
+
pr_response.raise_for_status()
|
|
1044
|
+
pr = pr_response.json()
|
|
1045
|
+
|
|
1046
|
+
# Add a comment to the issue about the PR
|
|
1047
|
+
pr_msg = f"Pull request #{pr['number']} has been created: " f"{pr['html_url']}"
|
|
1048
|
+
await self.add_comment(
|
|
1049
|
+
Comment(
|
|
1050
|
+
ticket_id=ticket_id,
|
|
1051
|
+
content=pr_msg,
|
|
1052
|
+
author="system",
|
|
1053
|
+
)
|
|
1054
|
+
)
|
|
1055
|
+
|
|
1056
|
+
return {
|
|
1057
|
+
"number": pr["number"],
|
|
1058
|
+
"url": pr["html_url"],
|
|
1059
|
+
"api_url": pr["url"],
|
|
1060
|
+
"branch": head_branch,
|
|
1061
|
+
"state": pr["state"],
|
|
1062
|
+
"draft": pr.get("draft", False),
|
|
1063
|
+
"title": pr["title"],
|
|
1064
|
+
"linked_issue": issue_number,
|
|
1065
|
+
}
|
|
1066
|
+
|
|
1067
|
+
async def link_existing_pull_request(
|
|
1068
|
+
self,
|
|
1069
|
+
ticket_id: str,
|
|
1070
|
+
pr_url: str,
|
|
1071
|
+
) -> dict[str, Any]:
|
|
1072
|
+
"""Link an existing pull request to a ticket.
|
|
1073
|
+
|
|
1074
|
+
Args:
|
|
1075
|
+
----
|
|
1076
|
+
ticket_id: Issue number to link the PR to
|
|
1077
|
+
pr_url: GitHub PR URL to link
|
|
1078
|
+
|
|
1079
|
+
Returns:
|
|
1080
|
+
-------
|
|
1081
|
+
Dictionary with link status and PR details
|
|
1082
|
+
|
|
1083
|
+
"""
|
|
1084
|
+
try:
|
|
1085
|
+
issue_number = int(ticket_id)
|
|
1086
|
+
except ValueError as e:
|
|
1087
|
+
raise ValueError(f"Invalid issue number: {ticket_id}") from e
|
|
1088
|
+
|
|
1089
|
+
# Parse PR URL to extract owner, repo, and PR number
|
|
1090
|
+
# Expected format: https://github.com/owner/repo/pull/123
|
|
1091
|
+
import re
|
|
1092
|
+
|
|
1093
|
+
pr_pattern = r"github\.com/([^/]+)/([^/]+)/pull/(\d+)"
|
|
1094
|
+
match = re.search(pr_pattern, pr_url)
|
|
1095
|
+
|
|
1096
|
+
if not match:
|
|
1097
|
+
raise ValueError(f"Invalid GitHub PR URL format: {pr_url}")
|
|
1098
|
+
|
|
1099
|
+
pr_owner, pr_repo, pr_number = match.groups()
|
|
1100
|
+
|
|
1101
|
+
# Verify the PR is from the same repository
|
|
1102
|
+
if pr_owner != self.owner or pr_repo != self.repo:
|
|
1103
|
+
raise ValueError(
|
|
1104
|
+
f"PR must be from the same repository ({self.owner}/{self.repo})"
|
|
1105
|
+
)
|
|
1106
|
+
|
|
1107
|
+
# Get PR details
|
|
1108
|
+
pr_response = await self.client.get(
|
|
1109
|
+
f"/repos/{self.owner}/{self.repo}/pulls/{pr_number}"
|
|
1110
|
+
)
|
|
1111
|
+
|
|
1112
|
+
if pr_response.status_code == 404:
|
|
1113
|
+
raise ValueError(f"Pull request #{pr_number} not found")
|
|
1114
|
+
|
|
1115
|
+
pr_response.raise_for_status()
|
|
1116
|
+
pr = pr_response.json()
|
|
1117
|
+
|
|
1118
|
+
# Update PR body to include issue reference if not already present
|
|
1119
|
+
current_body = pr.get("body", "")
|
|
1120
|
+
issue_ref = f"#{issue_number}"
|
|
1121
|
+
|
|
1122
|
+
if issue_ref not in current_body:
|
|
1123
|
+
# Add issue reference to the body
|
|
1124
|
+
updated_body = current_body or ""
|
|
1125
|
+
if updated_body:
|
|
1126
|
+
updated_body += "\n\n"
|
|
1127
|
+
updated_body += f"Related to #{issue_number}"
|
|
1128
|
+
|
|
1129
|
+
# Update the PR
|
|
1130
|
+
update_response = await self.client.patch(
|
|
1131
|
+
f"/repos/{self.owner}/{self.repo}/pulls/{pr_number}",
|
|
1132
|
+
json={"body": updated_body},
|
|
1133
|
+
)
|
|
1134
|
+
update_response.raise_for_status()
|
|
1135
|
+
|
|
1136
|
+
# Add a comment to the issue about the PR
|
|
1137
|
+
await self.add_comment(
|
|
1138
|
+
Comment(
|
|
1139
|
+
ticket_id=ticket_id,
|
|
1140
|
+
content=f"Linked to pull request #{pr_number}: {pr_url}",
|
|
1141
|
+
author="system",
|
|
1142
|
+
)
|
|
1143
|
+
)
|
|
1144
|
+
|
|
1145
|
+
return {
|
|
1146
|
+
"success": True,
|
|
1147
|
+
"pr_number": pr["number"],
|
|
1148
|
+
"pr_url": pr["html_url"],
|
|
1149
|
+
"pr_title": pr["title"],
|
|
1150
|
+
"pr_state": pr["state"],
|
|
1151
|
+
"linked_issue": issue_number,
|
|
1152
|
+
"message": f"Successfully linked PR #{pr_number} to issue #{issue_number}",
|
|
1153
|
+
}
|
|
1154
|
+
|
|
1155
|
+
async def get_collaborators(self) -> builtins.list[dict[str, Any]]:
|
|
1156
|
+
"""Get repository collaborators."""
|
|
1157
|
+
response = await self.client.get(
|
|
1158
|
+
f"/repos/{self.owner}/{self.repo}/collaborators"
|
|
1159
|
+
)
|
|
1160
|
+
response.raise_for_status()
|
|
1161
|
+
return response.json()
|
|
1162
|
+
|
|
1163
|
+
async def get_current_user(self) -> dict[str, Any] | None:
|
|
1164
|
+
"""Get current authenticated user information."""
|
|
1165
|
+
response = await self.client.get("/user")
|
|
1166
|
+
response.raise_for_status()
|
|
1167
|
+
return response.json()
|
|
1168
|
+
|
|
1169
|
+
async def list_labels(self) -> builtins.list[dict[str, Any]]:
|
|
1170
|
+
"""List all labels available in the repository.
|
|
1171
|
+
|
|
1172
|
+
Returns:
|
|
1173
|
+
-------
|
|
1174
|
+
List of label dictionaries with 'id', 'name', and 'color' fields
|
|
1175
|
+
|
|
1176
|
+
"""
|
|
1177
|
+
cache_key = "github_labels"
|
|
1178
|
+
cached = await self._labels_cache.get(cache_key)
|
|
1179
|
+
if cached is not None:
|
|
1180
|
+
return cached
|
|
1181
|
+
|
|
1182
|
+
response = await self.client.get(f"/repos/{self.owner}/{self.repo}/labels")
|
|
1183
|
+
response.raise_for_status()
|
|
1184
|
+
labels = response.json()
|
|
1185
|
+
|
|
1186
|
+
# Transform to standardized format
|
|
1187
|
+
standardized_labels = [
|
|
1188
|
+
{"id": label["name"], "name": label["name"], "color": label["color"]}
|
|
1189
|
+
for label in labels
|
|
1190
|
+
]
|
|
1191
|
+
|
|
1192
|
+
await self._labels_cache.set(cache_key, standardized_labels)
|
|
1193
|
+
return standardized_labels
|
|
1194
|
+
|
|
1195
|
+
async def update_milestone(
|
|
1196
|
+
self, milestone_number: int, updates: dict[str, Any]
|
|
1197
|
+
) -> Epic | None:
|
|
1198
|
+
"""Update a GitHub milestone (Epic).
|
|
1199
|
+
|
|
1200
|
+
Args:
|
|
1201
|
+
----
|
|
1202
|
+
milestone_number: Milestone number (not ID)
|
|
1203
|
+
updates: Dictionary with fields to update:
|
|
1204
|
+
- title: Milestone title
|
|
1205
|
+
- description: Milestone description (supports markdown)
|
|
1206
|
+
- state: TicketState value (maps to open/closed)
|
|
1207
|
+
- target_date: Due date in ISO format
|
|
1208
|
+
|
|
1209
|
+
Returns:
|
|
1210
|
+
-------
|
|
1211
|
+
Updated Epic object or None if not found
|
|
1212
|
+
|
|
1213
|
+
Raises:
|
|
1214
|
+
------
|
|
1215
|
+
ValueError: If no fields to update
|
|
1216
|
+
httpx.HTTPError: If API request fails
|
|
1217
|
+
|
|
1218
|
+
"""
|
|
1219
|
+
update_data = {}
|
|
1220
|
+
|
|
1221
|
+
# Map title directly
|
|
1222
|
+
if "title" in updates:
|
|
1223
|
+
update_data["title"] = updates["title"]
|
|
1224
|
+
|
|
1225
|
+
# Map description (supports markdown)
|
|
1226
|
+
if "description" in updates:
|
|
1227
|
+
update_data["description"] = updates["description"]
|
|
1228
|
+
|
|
1229
|
+
# Map state to GitHub milestone state
|
|
1230
|
+
if "state" in updates:
|
|
1231
|
+
state = updates["state"]
|
|
1232
|
+
if isinstance(state, TicketState):
|
|
1233
|
+
# GitHub only has open/closed
|
|
1234
|
+
update_data["state"] = (
|
|
1235
|
+
"closed"
|
|
1236
|
+
if state in [TicketState.DONE, TicketState.CLOSED]
|
|
1237
|
+
else "open"
|
|
1238
|
+
)
|
|
1239
|
+
else:
|
|
1240
|
+
update_data["state"] = state
|
|
1241
|
+
|
|
1242
|
+
# Map target_date to due_on
|
|
1243
|
+
if "target_date" in updates:
|
|
1244
|
+
# GitHub expects ISO 8601 format
|
|
1245
|
+
target_date = updates["target_date"]
|
|
1246
|
+
if isinstance(target_date, str):
|
|
1247
|
+
update_data["due_on"] = target_date
|
|
1248
|
+
elif hasattr(target_date, "isoformat"):
|
|
1249
|
+
update_data["due_on"] = target_date.isoformat()
|
|
1250
|
+
|
|
1251
|
+
if not update_data:
|
|
1252
|
+
raise ValueError("At least one field must be updated")
|
|
1253
|
+
|
|
1254
|
+
# Make API request
|
|
1255
|
+
response = await self.client.patch(
|
|
1256
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_number}",
|
|
1257
|
+
json=update_data,
|
|
1258
|
+
)
|
|
1259
|
+
response.raise_for_status()
|
|
1260
|
+
|
|
1261
|
+
# Convert response to Epic
|
|
1262
|
+
milestone_data = response.json()
|
|
1263
|
+
return self._milestone_to_epic(milestone_data)
|
|
1264
|
+
|
|
1265
|
+
async def update_epic(self, epic_id: str, updates: dict[str, Any]) -> Epic | None:
|
|
1266
|
+
"""Update a GitHub epic (milestone) by ID or number.
|
|
1267
|
+
|
|
1268
|
+
This is a convenience wrapper around update_milestone() that accepts
|
|
1269
|
+
either a milestone number or the epic ID from the Epic object.
|
|
1270
|
+
|
|
1271
|
+
Args:
|
|
1272
|
+
----
|
|
1273
|
+
epic_id: Epic ID (e.g., "milestone-5") or milestone number as string
|
|
1274
|
+
updates: Dictionary with fields to update
|
|
1275
|
+
|
|
1276
|
+
Returns:
|
|
1277
|
+
-------
|
|
1278
|
+
Updated Epic object or None if not found
|
|
1279
|
+
|
|
1280
|
+
"""
|
|
1281
|
+
# Extract milestone number from ID
|
|
1282
|
+
if epic_id.startswith("milestone-"):
|
|
1283
|
+
milestone_number = int(epic_id.replace("milestone-", ""))
|
|
1284
|
+
else:
|
|
1285
|
+
milestone_number = int(epic_id)
|
|
1286
|
+
|
|
1287
|
+
return await self.update_milestone(milestone_number, updates)
|
|
1288
|
+
|
|
1289
|
+
    async def add_attachment_to_issue(
        self, issue_number: int, file_path: str, comment: str | None = None
    ) -> dict[str, Any]:
        """Attach file to GitHub issue via comment.

        GitHub doesn't have a direct file attachment API. This method:
        1. Creates a comment with the file reference
        2. Returns metadata about the attachment

        Note: GitHub's actual file upload in comments requires browser-based
        drag-and-drop or git-lfs. This method creates a placeholder comment
        that users can edit to add actual file attachments through the UI.

        Args:
        ----
            issue_number: Issue number
            file_path: Path to file to attach
            comment: Optional comment text (defaults to "Attached: {filename}")

        Returns:
        -------
            Dictionary with comment data and file info

        Raises:
        ------
            FileNotFoundError: If file doesn't exist
            ValueError: If file too large (>25 MB)

        Note:
        ----
            GitHub file size limit: 25 MB
            Supported: Images, videos, documents

        """
        file_path_obj = Path(file_path)
        if not file_path_obj.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        # Check file size (25 MB limit)
        file_size = file_path_obj.stat().st_size
        if file_size > 25 * 1024 * 1024:  # 25 MB
            raise ValueError(
                f"File too large: {file_size} bytes (max 25 MB). "
                "Upload file externally and reference URL instead."
            )

        # Prepare comment body
        comment_body = comment or f"📎 Attached: `{file_path_obj.name}`"
        comment_body += (
            f"\n\n*Note: File `{file_path_obj.name}` ({file_size} bytes) "
            "needs to be manually uploaded through GitHub UI or referenced via URL.*"
        )

        # Create comment with file reference
        response = await self.client.post(
            f"/repos/{self.owner}/{self.repo}/issues/{issue_number}/comments",
            json={"body": comment_body},
        )
        response.raise_for_status()

        comment_data = response.json()

        return {
            "comment_id": comment_data["id"],
            "comment_url": comment_data["html_url"],
            "filename": file_path_obj.name,
            "file_size": file_size,
            "note": "File reference created. Upload file manually through GitHub UI.",
        }

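# Usage sketch (not part of the diff): add_attachment_to_issue() only posts a
# placeholder comment; the file itself still has to be uploaded through the
# GitHub UI or hosted elsewhere. The path below is purely illustrative.
async def _demo_attach_to_issue(adapter) -> None:
    result = await adapter.add_attachment_to_issue(
        123,
        "./docs/design.pdf",
        comment="Design document for review",
    )
    print(result["comment_url"], result["note"])
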
    async def add_attachment_reference_to_milestone(
        self, milestone_number: int, file_url: str, description: str
    ) -> Epic | None:
        """Add file reference to milestone description.

        Since GitHub milestones don't support direct file attachments,
        this method appends a markdown link to the milestone description.

        Args:
        ----
            milestone_number: Milestone number
            file_url: URL to the file (external or GitHub-hosted)
            description: Description/title for the file

        Returns:
        -------
            Updated Epic object

        Example:
        -------
            await adapter.add_attachment_reference_to_milestone(
                5,
                "https://example.com/spec.pdf",
                "Technical Specification"
            )
            # Appends to description: "[Technical Specification](https://example.com/spec.pdf)"

        """
        # Get current milestone
        response = await self.client.get(
            f"/repos/{self.owner}/{self.repo}/milestones/{milestone_number}"
        )
        response.raise_for_status()
        milestone = response.json()

        # Append file reference to description
        current_desc = milestone.get("description", "")
        attachment_markdown = f"\n\n📎 [{description}]({file_url})"
        new_description = current_desc + attachment_markdown

        # Update milestone with new description
        return await self.update_milestone(
            milestone_number, {"description": new_description}
        )

    async def add_attachment(
        self, ticket_id: str, file_path: str, description: str | None = None
    ) -> dict[str, Any]:
        """Add attachment to GitHub ticket (issue or milestone).

        This method routes to the appropriate attachment method based on ticket type:
        - Issues: Creates comment with file reference
        - Milestones: Not supported, raises NotImplementedError with guidance

        Args:
        ----
            ticket_id: Ticket identifier (issue number or milestone ID)
            file_path: Path to file to attach
            description: Optional description

        Returns:
        -------
            Attachment metadata

        Raises:
        ------
            NotImplementedError: For milestones (no native support)
            FileNotFoundError: If file doesn't exist

        """
        # Determine ticket type from ID format
        if ticket_id.startswith("milestone-"):
            raise NotImplementedError(
                "GitHub milestones do not support direct file attachments. "
                "Workaround: Upload file externally and use "
                "add_attachment_reference_to_milestone() to add URL to description."
            )

        # Assume it's an issue number
        issue_number = int(ticket_id.replace("issue-", ""))
        return await self.add_attachment_to_issue(issue_number, file_path, description)

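# Usage sketch (not part of the diff): add_attachment() routes by ID prefix,
# so milestone IDs fail fast with NotImplementedError while plain issue
# numbers (optionally prefixed "issue-") fall through to the comment path.
async def _demo_add_attachment(adapter) -> None:
    await adapter.add_attachment("issue-123", "./notes.md")  # comment-based path
    try:
        await adapter.add_attachment("milestone-5", "./notes.md")
    except NotImplementedError as exc:
        print(exc)  # points at add_attachment_reference_to_milestone()
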
    async def list_cycles(
        self, project_id: str | None = None, limit: int = 50
    ) -> builtins.list[dict[str, Any]]:
        """List GitHub Project iterations (cycles/sprints).

        GitHub Projects V2 uses "iterations" for sprint/cycle functionality.
        Requires a project node ID (not a numeric ID).

        Args:
        ----
            project_id: GitHub Project V2 node ID (e.g., 'PVT_kwDOABcdefgh').
                This is required for Projects V2. Can be found in the
                project's GraphQL ID.
            limit: Maximum number of iterations to return (default: 50)

        Returns:
        -------
            List of iteration dictionaries with fields:
            - id: Iteration node ID
            - title: Iteration title/name
            - startDate: Start date (ISO format)
            - duration: Duration in days
            - endDate: Calculated end date (startDate + duration)

        Raises:
        ------
            ValueError: If project_id not provided or credentials invalid
            httpx.HTTPError: If GraphQL query fails

        Example:
        -------
            >>> iterations = await adapter.list_cycles(
            ...     project_id="PVT_kwDOABCD1234",
            ...     limit=10
            ... )
            >>> for iteration in iterations:
            ...     print(f"{iteration['title']}: {iteration['startDate']} ({iteration['duration']} days)")

        Note:
        ----
            GitHub Projects V2 node IDs can be obtained via the GitHub GraphQL API.
            This is different from project numbers shown in the UI.

        """
        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        if not project_id:
            raise ValueError(
                "project_id is required for GitHub Projects V2. "
                "Provide a project node ID (e.g., 'PVT_kwDOABcdefgh'). "
                "Find this via GraphQL API: query { organization(login: 'org') { "
                "projectV2(number: 1) { id } } }"
            )

        # Execute GraphQL query to fetch iterations
        query = GET_PROJECT_ITERATIONS
        variables = {"projectId": project_id, "first": min(limit, 100), "after": None}

        try:
            result = await self._graphql_request(query, variables)

            # Extract iterations from response
            project_node = result.get("node")
            if not project_node:
                raise ValueError(
                    f"Project not found with ID: {project_id}. "
                    "Verify the project ID is correct and you have access."
                )

            iterations_data = project_node.get("iterations", {})
            iteration_nodes = iterations_data.get("nodes", [])

            # Transform to standard format and calculate end dates
            iterations = []
            for iteration in iteration_nodes:
                # Calculate end date from start date + duration
                start_date = iteration.get("startDate")
                duration = iteration.get("duration", 0)

                end_date = None
                if start_date and duration:
                    from datetime import datetime, timedelta

                    start_dt = datetime.fromisoformat(start_date.replace("Z", "+00:00"))
                    end_dt = start_dt + timedelta(days=duration)
                    end_date = end_dt.isoformat()

                iterations.append(
                    {
                        "id": iteration["id"],
                        "title": iteration.get("title", ""),
                        "startDate": start_date,
                        "duration": duration,
                        "endDate": end_date,
                    }
                )

            return iterations

        except ValueError:
            # Re-raise validation errors
            raise
        except Exception as e:
            raise ValueError(f"Failed to list project iterations: {e}") from e

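# Sketch (not part of the diff) of the endDate arithmetic used by
# list_cycles(): startDate plus duration-in-days, with a trailing "Z"
# normalized to an explicit UTC offset before parsing.
from datetime import datetime, timedelta


def _iteration_end_date(start_date: str, duration_days: int) -> str:
    start_dt = datetime.fromisoformat(start_date.replace("Z", "+00:00"))
    return (start_dt + timedelta(days=duration_days)).isoformat()


# _iteration_end_date("2025-01-06", 14) -> "2025-01-20T00:00:00"
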
    async def get_issue_status(self, issue_number: int) -> dict[str, Any]:
        """Get rich status information for a GitHub issue.

        GitHub issues have binary states (open/closed) natively. Extended status
        tracking uses labels following the status:* convention (e.g., status:in-progress).

        Args:
        ----
            issue_number: GitHub issue number

        Returns:
        -------
            Dictionary with comprehensive status information:
            - state: Native GitHub state ('open' or 'closed')
            - status_label: Extended status from labels (in_progress, blocked, etc.)
            - extended_state: Universal TicketState value
            - labels: All issue labels
            - state_reason: For closed issues (completed or not_planned)
            - metadata: Additional issue metadata (assignees, milestone, etc.)

        Raises:
        ------
            ValueError: If credentials invalid or issue not found
            httpx.HTTPError: If API request fails

        Example:
        -------
            >>> status = await adapter.get_issue_status(123)
            >>> print(f"Issue #{status['number']}: {status['extended_state']}")
            >>> print(f"Native state: {status['state']}")
            >>> if status['status_label']:
            ...     print(f"Label-based status: {status['status_label']}")

        Note:
        ----
            GitHub's binary state model is extended via labels:
            - open + no label = OPEN
            - open + status:in-progress = IN_PROGRESS
            - open + status:blocked = BLOCKED
            - closed = CLOSED (check state_reason for details)

        """
        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        try:
            # Fetch issue via REST API
            response = await self.client.get(
                f"/repos/{self.owner}/{self.repo}/issues/{issue_number}"
            )

            if response.status_code == 404:
                raise ValueError(f"Issue #{issue_number} not found")

            response.raise_for_status()
            issue = response.json()

            # Extract labels
            labels = [label["name"] for label in issue.get("labels", [])]

            # Derive extended state from issue data
            extended_state = self._extract_state_from_issue(issue)

            # Find status label if present
            status_label = None
            for _state, label_name in GitHubStateMapping.STATE_LABELS.items():
                if label_name.lower() in [label.lower() for label in labels]:
                    status_label = label_name
                    break

            # Build comprehensive status response
            status_info = {
                "number": issue["number"],
                "state": issue["state"],  # 'open' or 'closed'
                "status_label": status_label,  # Label-based extended status
                "extended_state": extended_state.value,  # Universal TicketState
                "labels": labels,
                "state_reason": issue.get(
                    "state_reason"
                ),  # 'completed' or 'not_planned'
                "metadata": {
                    "title": issue["title"],
                    "url": issue["html_url"],
                    "assignees": [
                        assignee["login"] for assignee in issue.get("assignees", [])
                    ],
                    "milestone": (
                        issue.get("milestone", {}).get("title")
                        if issue.get("milestone")
                        else None
                    ),
                    "created_at": issue["created_at"],
                    "updated_at": issue["updated_at"],
                    "closed_at": issue.get("closed_at"),
                },
            }

            return status_info

        except ValueError:
            # Re-raise validation errors
            raise
        except httpx.HTTPError as e:
            raise ValueError(f"Failed to get issue status: {e}") from e

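# Usage sketch (not part of the diff): reading the blended status record that
# get_issue_status() returns (native state, label-derived status, metadata).
async def _demo_issue_status(adapter) -> None:
    status = await adapter.get_issue_status(123)
    print(status["state"], status["extended_state"])
    if status["status_label"]:
        print("label-based status:", status["status_label"])
    print("assignees:", status["metadata"]["assignees"])
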
    async def list_issue_statuses(self) -> builtins.list[dict[str, Any]]:
        """List available issue statuses in GitHub.

        Returns all possible issue statuses including native GitHub states
        and extended label-based states.

        Returns:
        -------
            List of status dictionaries with fields:
            - name: Status name (e.g., 'open', 'in_progress', 'closed')
            - type: Status type ('native' or 'extended')
            - label: Associated label name (for extended statuses)
            - description: Human-readable description
            - category: Status category (open, in_progress, done, etc.)

        Example:
        -------
            >>> statuses = await adapter.list_issue_statuses()
            >>> for status in statuses:
            ...     print(f"{status['name']}: {status['description']}")
            ...     if status['type'] == 'extended':
            ...         print(f"  Label: {status['label']}")

        Note:
        ----
            GitHub natively supports only 'open' and 'closed' states.
            Extended statuses are implemented via labels following the
            status:* naming convention (e.g., status:in-progress).

        """
        # Define native GitHub states
        statuses = [
            {
                "name": "open",
                "type": "native",
                "label": None,
                "description": "Issue is open and not yet completed",
                "category": "open",
            },
            {
                "name": "closed",
                "type": "native",
                "label": None,
                "description": "Issue is closed (completed or not planned)",
                "category": "done",
            },
        ]

        # Add extended label-based states
        for state, label_name in GitHubStateMapping.STATE_LABELS.items():
            statuses.append(
                {
                    "name": state.value,
                    "type": "extended",
                    "label": label_name,
                    "description": f"Issue is {state.value.replace('_', ' ')} (tracked via label)",
                    "category": state.value,
                }
            )

        return statuses

    async def list_project_labels(
        self, milestone_number: int | None = None
    ) -> builtins.list[dict[str, Any]]:
        """List labels used in a GitHub milestone (project/epic).

        If milestone_number is provided, returns only labels used by issues
        in that milestone. Otherwise, returns all repository labels.

        Args:
        ----
            milestone_number: Optional milestone number to filter labels.
                If None, returns all repository labels.

        Returns:
        -------
            List of label dictionaries with fields:
            - id: Label identifier (name)
            - name: Label name
            - color: Label color (hex without #)
            - description: Label description (if available)
            - usage_count: Number of issues using this label (if milestone filtered)

        Example:
        -------
            >>> # Get all repository labels
            >>> all_labels = await adapter.list_project_labels()
            >>> print(f"Repository has {len(all_labels)} labels")
            >>>
            >>> # Get labels used in milestone 5
            >>> milestone_labels = await adapter.list_project_labels(milestone_number=5)
            >>> for label in milestone_labels:
            ...     print(f"{label['name']}: used by {label['usage_count']} issues")

        Note:
        ----
            Labels are repository-scoped in GitHub, not milestone-scoped.
            When filtering by milestone, this method queries issues in that
            milestone and extracts their unique labels.

        """
        # Validate credentials
        is_valid, error_message = self.validate_credentials()
        if not is_valid:
            raise ValueError(error_message)

        try:
            if milestone_number is None:
                # Return all repository labels (delegate to existing method)
                return await self.list_labels()

            # Query issues in the milestone
            params = {
                "milestone": str(milestone_number),
                "state": "all",
                "per_page": 100,
            }

            response = await self.client.get(
                f"/repos/{self.owner}/{self.repo}/issues", params=params
            )
            response.raise_for_status()
            issues = response.json()

            # Extract unique labels from issues
            label_usage = {}  # {label_name: {data, count}}
            for issue in issues:
                # Skip pull requests
                if "pull_request" in issue:
                    continue

                for label in issue.get("labels", []):
                    label_name = label["name"]
                    if label_name not in label_usage:
                        label_usage[label_name] = {
                            "id": label_name,
                            "name": label_name,
                            "color": label["color"],
                            "description": label.get("description", ""),
                            "usage_count": 0,
                        }
                    label_usage[label_name]["usage_count"] += 1

            # Convert to list and sort by usage count
            labels = list(label_usage.values())
            labels.sort(key=lambda x: x["usage_count"], reverse=True)

            return labels

        except httpx.HTTPError as e:
            raise ValueError(f"Failed to list project labels: {e}") from e

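# Usage sketch (not part of the diff): the milestone-filtered variant returns
# a usage_count per label, already sorted by how many issues carry the label.
async def _demo_project_labels(adapter) -> None:
    for label in await adapter.list_project_labels(milestone_number=5):
        print(f"{label['name']}: {label['usage_count']} issue(s)")
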
    # ========================================================================
    # New Milestone Methods (Phase 2 - GitHub Native Support)
    # ========================================================================

    async def milestone_create(
        self,
        name: str,
        target_date: date | None = None,
        labels: list[str] | None = None,
        description: str = "",
        project_id: str | None = None,
    ) -> Milestone:
        """Create milestone using GitHub Milestones API.

        GitHub milestones are repository-scoped and natively supported.

        Args:
        ----
            name: Milestone name/title
            target_date: Target completion date (optional)
            labels: Labels for local storage (GitHub doesn't store labels on milestones)
            description: Milestone description
            project_id: Project ID (ignored for GitHub, repo-scoped)

        Returns:
        -------
            Created Milestone object

        Raises:
        ------
            ValueError: If repository is not configured
            httpx.HTTPError: If API request fails

        """
        from datetime import datetime as dt

        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        # GitHub API expects ISO 8601 datetime for due_on
        due_on = None
        if target_date:
            due_on = dt.combine(target_date, dt.min.time()).isoformat() + "Z"

        milestone_data = {
            "title": name,
            "description": description,
            "state": "open",
        }

        if due_on:
            milestone_data["due_on"] = due_on

        # Create milestone via GitHub API
        response = await self.client.post(
            f"/repos/{self.owner}/{self.repo}/milestones",
            json=milestone_data,
        )
        response.raise_for_status()

        gh_milestone = response.json()

        # Convert to Milestone model
        milestone = self._github_milestone_to_milestone(gh_milestone, labels)

        # Save to local storage for label tracking
        from pathlib import Path

        from ...core.milestone_manager import MilestoneManager

        config_dir = Path.home() / ".mcp-ticketer"
        manager = MilestoneManager(config_dir)
        manager.save_milestone(milestone)

        logger.info(f"Created GitHub milestone: {milestone.id} ({milestone.name})")
        return milestone

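# Usage sketch (not part of the diff): milestone_create() stores labels only
# in the local ~/.mcp-ticketer cache, since GitHub milestones carry no labels.
async def _demo_milestone_create(adapter) -> None:
    from datetime import date

    milestone = await adapter.milestone_create(
        name="v2.3.0",
        target_date=date(2026, 1, 15),
        labels=["release", "backend"],
        description="Next minor release",
    )
    print(milestone.id, milestone.name)
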
    async def milestone_get(self, milestone_id: str) -> Milestone | None:
        """Get milestone by ID (milestone number in GitHub).

        Args:
        ----
            milestone_id: Milestone number as string

        Returns:
        -------
            Milestone object or None if not found

        Raises:
        ------
            ValueError: If repository is not configured

        """
        from pathlib import Path

        from ...core.milestone_manager import MilestoneManager

        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        try:
            # milestone_id is the milestone number in GitHub
            response = await self.client.get(
                f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
            )

            if response.status_code == 404:
                return None

            response.raise_for_status()
            gh_milestone = response.json()

            # Load labels from local storage
            config_dir = Path.home() / ".mcp-ticketer"
            manager = MilestoneManager(config_dir)
            local_milestone = manager.get_milestone(milestone_id)
            labels = local_milestone.labels if local_milestone else []

            return self._github_milestone_to_milestone(gh_milestone, labels)

        except httpx.HTTPError as e:
            logger.error(f"Failed to get milestone {milestone_id}: {e}")
            return None

    async def milestone_list(
        self,
        project_id: str | None = None,
        state: str | None = None,
    ) -> list[Milestone]:
        """List milestones from GitHub repository.

        Note: project_id is ignored for GitHub (repo-scoped).

        Args:
        ----
            project_id: Project ID (ignored, GitHub is repo-scoped)
            state: Filter by state (open, active, closed, completed)

        Returns:
        -------
            List of Milestone objects

        Raises:
        ------
            ValueError: If repository is not configured

        """
        from pathlib import Path

        from ...core.milestone_manager import MilestoneManager

        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        # Map our states to GitHub states
        github_state = "all"
        if state in ["open", "active"]:
            github_state = "open"
        elif state in ["completed", "closed"]:
            github_state = "closed"

        params = {
            "state": github_state,
            "sort": "due_on",
            "direction": "asc",
            "per_page": 100,
        }

        response = await self.client.get(
            f"/repos/{self.owner}/{self.repo}/milestones",
            params=params,
        )
        response.raise_for_status()

        # Load labels from local storage
        config_dir = Path.home() / ".mcp-ticketer"
        manager = MilestoneManager(config_dir)

        milestones = []
        for gh_milestone in response.json():
            milestone_id = str(gh_milestone["number"])
            local_milestone = manager.get_milestone(milestone_id)
            labels = local_milestone.labels if local_milestone else []

            milestone = self._github_milestone_to_milestone(gh_milestone, labels)
            milestones.append(milestone)

        logger.info(
            f"Listed {len(milestones)} GitHub milestones (state={github_state})"
        )
        return milestones

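# Usage sketch (not part of the diff): "open"/"active" and "closed"/"completed"
# collapse onto GitHub's two milestone states; any other value lists all.
async def _demo_milestone_list(adapter) -> None:
    for milestone in await adapter.milestone_list(state="active"):
        print(milestone.id, milestone.name)
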
    async def milestone_update(
        self,
        milestone_id: str,
        name: str | None = None,
        target_date: date | None = None,
        state: str | None = None,
        labels: list[str] | None = None,
        description: str | None = None,
    ) -> Milestone | None:
        """Update milestone properties.

        Args:
        ----
            milestone_id: Milestone number as string
            name: New milestone name
            target_date: New target date
            state: New state (open, closed)
            labels: New labels (stored locally)
            description: New description

        Returns:
        -------
            Updated Milestone object or None if not found

        Raises:
        ------
            ValueError: If repository is not configured

        """
        from datetime import datetime as dt
        from pathlib import Path

        from ...core.milestone_manager import MilestoneManager

        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        update_data = {}

        if name:
            update_data["title"] = name
        if description is not None:
            update_data["description"] = description
        if target_date:
            due_on = dt.combine(target_date, dt.min.time()).isoformat() + "Z"
            update_data["due_on"] = due_on
        if state:
            # Map our states to GitHub states
            if state in ["completed", "closed"]:
                update_data["state"] = "closed"
            elif state in ["open", "active"]:
                update_data["state"] = "open"

        if not update_data and labels is None:
            raise ValueError("At least one field must be updated")

        # Update milestone via GitHub API
        if update_data:
            response = await self.client.patch(
                f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}",
                json=update_data,
            )
            response.raise_for_status()
            gh_milestone = response.json()
        else:
            # Only labels updated, fetch current milestone
            response = await self.client.get(
                f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
            )
            response.raise_for_status()
            gh_milestone = response.json()

        # Update labels in local storage
        config_dir = Path.home() / ".mcp-ticketer"
        manager = MilestoneManager(config_dir)

        if labels is not None:
            milestone = self._github_milestone_to_milestone(gh_milestone, labels)
            manager.save_milestone(milestone)
            logger.info(f"Updated GitHub milestone: {milestone_id} (including labels)")
            return milestone

        # Load existing labels
        local_milestone = manager.get_milestone(milestone_id)
        existing_labels = local_milestone.labels if local_milestone else []

        milestone = self._github_milestone_to_milestone(gh_milestone, existing_labels)
        logger.info(f"Updated GitHub milestone: {milestone_id}")
        return milestone

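# Usage sketch (not part of the diff): a labels-only update skips the PATCH
# request and only refreshes the locally cached labels for the milestone.
async def _demo_milestone_update(adapter) -> None:
    await adapter.milestone_update("5", name="v2.3.0 (slipped)", state="open")
    await adapter.milestone_update("5", labels=["release", "at-risk"])
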
    async def milestone_delete(self, milestone_id: str) -> bool:
        """Delete milestone from GitHub repository.

        Args:
        ----
            milestone_id: Milestone number as string

        Returns:
        -------
            True if deleted, False if not found

        Raises:
        ------
            ValueError: If repository is not configured

        """
        from pathlib import Path

        from ...core.milestone_manager import MilestoneManager

        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        try:
            response = await self.client.delete(
                f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
            )

            # GitHub returns 204 No Content on successful deletion
            if response.status_code == 204:
                # Remove from local storage
                config_dir = Path.home() / ".mcp-ticketer"
                manager = MilestoneManager(config_dir)
                manager.delete_milestone(milestone_id)

                logger.info(f"Deleted GitHub milestone: {milestone_id}")
                return True

            # Handle 404 errors gracefully
            if response.status_code == 404:
                logger.warning(f"Milestone {milestone_id} not found for deletion")
                return False

            response.raise_for_status()
            return True

        except httpx.HTTPError as e:
            logger.error(f"Failed to delete milestone {milestone_id}: {e}")
            return False

    async def milestone_get_issues(
        self,
        milestone_id: str,
        state: str | None = None,
    ) -> list[dict[str, Any]]:
        """Get issues in milestone.

        Args:
        ----
            milestone_id: Milestone number as string
            state: Filter by state (open, closed, all)

        Returns:
        -------
            List of issue dictionaries

        Raises:
        ------
            ValueError: If repository is not configured

        """
        if not self.repo:
            raise ValueError("Repository required for GitHub milestone operations")

        params = {
            "milestone": milestone_id,
            "state": state or "all",
            "per_page": 100,
        }

        response = await self.client.get(
            f"/repos/{self.owner}/{self.repo}/issues",
            params=params,
        )
        response.raise_for_status()

        # Convert GitHub issues to our format
        issues = []
        for gh_issue in response.json():
            # Skip pull requests (GitHub includes them in issues endpoint)
            if "pull_request" in gh_issue:
                continue

            issues.append(
                {
                    "id": str(gh_issue["number"]),
                    "identifier": f"#{gh_issue['number']}",
                    "title": gh_issue["title"],
                    "state": gh_issue["state"],
                    "labels": [label["name"] for label in gh_issue.get("labels", [])],
                    "created_at": gh_issue["created_at"],
                    "updated_at": gh_issue["updated_at"],
                }
            )

        logger.info(f"Retrieved {len(issues)} issues from milestone {milestone_id}")
        return issues

    def _github_milestone_to_milestone(
        self,
        gh_milestone: dict[str, Any],
        labels: list[str] | None = None,
    ) -> Milestone:
        """Convert GitHub Milestone to universal Milestone model (delegated to mappers module)."""
        return map_github_milestone_to_milestone(gh_milestone, self.repo, labels)

    # =============================================================================
    # GitHub Projects V2 Operations (Week 2: Core CRUD)
    # =============================================================================

    async def project_list(
        self,
        owner: str | None = None,
        scope: ProjectScope = ProjectScope.ORGANIZATION,
        state: ProjectState | None = None,
        limit: int = 10,
        cursor: str | None = None,
    ) -> list[Project]:
        """List projects for an organization or user.

        Args:
        ----
            owner: Organization or user login (defaults to configured owner)
            scope: Project scope (ORGANIZATION or USER)
            state: Filter by project state (ACTIVE, COMPLETED, ARCHIVED)
            limit: Maximum number of projects to return (default: 10)
            cursor: Pagination cursor for next page

        Returns:
        -------
            List of Project objects

        Raises:
        ------
            ValueError: If owner not provided and not configured
            RuntimeError: If GraphQL query fails

        Example:
        -------
            projects = await adapter.project_list(owner="myorg", limit=20)

        """
        # Validate owner (use self.owner if not provided)
        owner = owner or self.owner
        if not owner:
            raise ValueError("Owner required for GitHub project operations")

        # Build GraphQL variables
        variables = {
            "owner": owner,
            "first": limit,
            "after": cursor,
        }

        try:
            # Execute LIST_PROJECTS_QUERY
            data = await self.gh_client.execute_graphql(
                query=LIST_PROJECTS_QUERY,
                variables=variables,
            )

            # Parse response and extract projects array
            org_data = data.get("organization")
            if not org_data:
                logger.warning(f"Organization {owner} not found")
                return []

            projects_data = org_data.get("projectsV2", {})
            project_nodes = projects_data.get("nodes", [])

            # Map each project using mapper
            projects = []
            for project_data in project_nodes:
                project = map_github_projectv2_to_project(project_data, owner)

                # Filter by state if provided (post-query filtering)
                if state is None or project.state == state:
                    projects.append(project)

            logger.info(f"Retrieved {len(projects)} projects for {owner}")
            return projects

        except Exception as e:
            logger.error(f"Failed to list projects for {owner}: {e}")
            raise RuntimeError(f"Failed to list projects: {e}") from e

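# Usage sketch (not part of the diff): state filtering in project_list() is
# applied client-side after the GraphQL page is fetched, so "limit" bounds the
# raw page size rather than the filtered result count.
async def _demo_project_list(adapter) -> None:
    projects = await adapter.project_list(owner="myorg", limit=20)
    for project in projects:
        print(project.id, project.state)
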
    async def project_get(
        self,
        project_id: str,
        owner: str | None = None,
    ) -> Project | None:
        """Get a single project by ID or number.

        Automatically detects ID format:
        - Node ID format: "PVT_kwDOABCD..." (starts with PVT_)
        - Number format: "123" (numeric string)

        Args:
        ----
            project_id: Project node ID or number
            owner: Organization or user login (defaults to configured owner)

        Returns:
        -------
            Project object if found, None otherwise

        Raises:
        ------
            ValueError: If owner not provided for number-based lookup
            RuntimeError: If GraphQL query fails

        Example:
        -------
            # By number
            project = await adapter.project_get("42", owner="myorg")

            # By node ID
            project = await adapter.project_get("PVT_kwDOABCD1234")

        """
        try:
            # Auto-detect ID format
            if project_id.startswith("PVT_"):
                # Use GET_PROJECT_BY_ID_QUERY for node IDs
                data = await self.gh_client.execute_graphql(
                    query=GET_PROJECT_BY_ID_QUERY,
                    variables={"projectId": project_id},
                )

                project_data = data.get("node")
                if not project_data:
                    logger.warning(f"Project {project_id} not found")
                    return None

                # Extract owner from project data
                owner_data = project_data.get("owner", {})
                owner_login = owner_data.get("login", owner or self.owner)

                project = map_github_projectv2_to_project(project_data, owner_login)
                logger.info(f"Retrieved project {project_id} by node ID")
                return project

            else:
                # Numeric ID - requires owner
                owner = owner or self.owner
                if not owner:
                    raise ValueError("Owner required for number-based project lookup")

                # Convert to integer
                try:
                    project_number = int(project_id)
                except ValueError as e:
                    raise ValueError(f"Invalid project ID format: {project_id}") from e

                # Use GET_PROJECT_QUERY for number-based lookup
                data = await self.gh_client.execute_graphql(
                    query=GET_PROJECT_QUERY,
                    variables={"owner": owner, "number": project_number},
                )

                org_data = data.get("organization")
                if not org_data:
                    logger.warning(f"Organization {owner} not found")
                    return None

                project_data = org_data.get("projectV2")
                if not project_data:
                    logger.warning(f"Project {project_id} not found for {owner}")
                    return None

                project = map_github_projectv2_to_project(project_data, owner)
                logger.info(f"Retrieved project {project_id} by number")
                return project

        except Exception as e:
            logger.error(f"Failed to get project {project_id}: {e}")
            raise RuntimeError(f"Failed to get project: {e}") from e

    async def project_create(
        self,
        title: str,
        description: str | None = None,
        owner: str | None = None,
        scope: ProjectScope = ProjectScope.ORGANIZATION,
    ) -> Project:
        """Create a new GitHub Projects V2 project.

        Args:
        ----
            title: Project title (required)
            description: Project description (optional)
            owner: Organization or user login (defaults to configured owner)
            scope: Project scope (ORGANIZATION or USER)

        Returns:
        -------
            Newly created Project object

        Raises:
        ------
            ValueError: If owner not provided
            RuntimeError: If creation fails (permissions, etc.)

        Example:
        -------
            project = await adapter.project_create(
                title="Q4 Features",
                description="New features for Q4 2025",
                owner="myorg"
            )

        """
        # Validate owner
        owner = owner or self.owner
        if not owner:
            raise ValueError("Owner required for GitHub project creation")

        try:
            # Get owner node ID (query organization)
            # We need to fetch the organization/user to get its node ID
            org_query = """
                query GetOrgId($login: String!) {
                    organization(login: $login) {
                        id
                    }
                }
            """

            org_data = await self.gh_client.execute_graphql(
                query=org_query,
                variables={"login": owner},
            )

            org = org_data.get("organization")
            if not org:
                raise ValueError(f"Organization {owner} not found")

            owner_id = org.get("id")

            # Execute CREATE_PROJECT_MUTATION
            data = await self.gh_client.execute_graphql(
                query=CREATE_PROJECT_MUTATION,
                variables={
                    "ownerId": owner_id,
                    "title": title,
                },
            )

            # Parse response and extract created project
            create_result = data.get("createProjectV2", {})
            project_data = create_result.get("projectV2")

            if not project_data:
                raise RuntimeError("Project creation returned no data")

            # Map using mapper
            project = map_github_projectv2_to_project(project_data, owner)

            # Update description if provided (requires separate mutation)
            if description:
                await self.project_update(
                    project_id=project.id,
                    description=description,
                )

            logger.info(f"Created project: {project.id} ({title})")
            return project

        except Exception as e:
            logger.error(f"Failed to create project '{title}': {e}")
            raise RuntimeError(f"Failed to create project: {e}") from e

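# Usage sketch (not part of the diff): project_create() resolves the owner's
# node ID first, then runs the create mutation; passing a description triggers
# a follow-up project_update() call because the create mutation only takes a title.
async def _demo_project_create(adapter) -> None:
    project = await adapter.project_create(
        title="Q1 Roadmap",
        description="Planning board for Q1",
        owner="myorg",
    )
    print(project.id)
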
    async def project_update(
        self,
        project_id: str,
        title: str | None = None,
        description: str | None = None,
        readme: str | None = None,
        state: ProjectState | None = None,
    ) -> Project | None:
        """Update project metadata.

        Supports partial updates - only provided fields are updated.

        Args:
        ----
            project_id: Project node ID (PVT_...)
            title: New project title (optional)
            description: New project description (optional)
            readme: New project readme (optional)
            state: New project state (optional)

        Returns:
        -------
            Updated Project object

        Raises:
        ------
            ValueError: If project_id invalid or no fields to update
            RuntimeError: If update fails

        Example:
        -------
            project = await adapter.project_update(
                project_id="PVT_kwDOABCD1234",
                title="Updated Title",
                state=ProjectState.COMPLETED
            )

        """
        # Validate at least one field is provided
        if not any([title, description, readme, state]):
            raise ValueError("At least one field must be provided for update")

        try:
            # Build mutation variables (only include provided fields)
            variables: dict[str, Any] = {"projectId": project_id}

            if title is not None:
                variables["title"] = title

            if description is not None:
                variables["shortDescription"] = description

            if readme is not None:
                variables["readme"] = readme

            # Convert ProjectState to GitHub boolean
            if state is not None:
                # GitHub only has open/closed via the 'closed' boolean
                if state in (ProjectState.COMPLETED, ProjectState.ARCHIVED):
                    variables["closed"] = True
                elif state == ProjectState.ACTIVE:
                    variables["closed"] = False
                # PLANNED and CANCELLED don't have direct mappings
                # We'll keep the project open for PLANNED

            # Execute UPDATE_PROJECT_MUTATION
            data = await self.gh_client.execute_graphql(
                query=UPDATE_PROJECT_MUTATION,
                variables=variables,
            )

            # Parse response
            update_result = data.get("updateProjectV2", {})
            project_data = update_result.get("projectV2")

            if not project_data:
                logger.warning(f"Project {project_id} not found for update")
                return None

            # Extract owner from project data
            owner_data = project_data.get("owner", {})
            owner = owner_data.get("login", self.owner)

            # Map using mapper
            project = map_github_projectv2_to_project(project_data, owner)

            logger.info(f"Updated project: {project_id}")
            return project

        except Exception as e:
            logger.error(f"Failed to update project {project_id}: {e}")
            raise RuntimeError(f"Failed to update project: {e}") from e

    async def project_delete(
        self,
        project_id: str,
        hard_delete: bool = False,
    ) -> bool:
        """Delete a project.

        By default performs soft delete (closes project).
        Set hard_delete=True to permanently delete.

        Args:
        ----
            project_id: Project node ID (PVT_...)
            hard_delete: If True, permanently delete; if False, soft delete (close)

        Returns:
        -------
            True if successful, False otherwise

        Raises:
        ------
            RuntimeError: If deletion fails

        Example:
        -------
            # Soft delete (close)
            await adapter.project_delete("PVT_kwDOABCD1234")

            # Hard delete (permanent)
            await adapter.project_delete("PVT_kwDOABCD1234", hard_delete=True)

        """
        try:
            if hard_delete:
                # Hard delete using DELETE_PROJECT_MUTATION
                logger.warning(f"Permanently deleting project {project_id}")

                data = await self.gh_client.execute_graphql(
                    query=DELETE_PROJECT_MUTATION,
                    variables={"projectId": project_id},
                )

                delete_result = data.get("deleteProjectV2", {})
                deleted_project = delete_result.get("projectV2")

                if deleted_project:
                    logger.info(f"Permanently deleted project: {project_id}")
                    return True
                else:
                    logger.warning(f"Failed to delete project {project_id}")
                    return False

            else:
                # Soft delete by setting public=false and closed=true
                data = await self.gh_client.execute_graphql(
                    query=UPDATE_PROJECT_MUTATION,
                    variables={
                        "projectId": project_id,
                        "public": False,
                        "closed": True,
                    },
                )

                update_result = data.get("updateProjectV2", {})
                updated_project = update_result.get("projectV2")

                if updated_project:
                    logger.info(f"Soft deleted (closed) project: {project_id}")
                    return True
                else:
                    logger.warning(f"Failed to close project {project_id}")
                    return False

        except Exception as e:
            logger.error(f"Failed to delete project {project_id}: {e}")
            raise RuntimeError(f"Failed to delete project: {e}") from e

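# Usage sketch (not part of the diff): the soft path closes and un-publishes
# the project via the update mutation; the hard path is irreversible.
async def _demo_project_delete(adapter) -> None:
    closed = await adapter.project_delete("PVT_kwDOABCD1234")
    gone = await adapter.project_delete("PVT_kwDOABCD1234", hard_delete=True)
    print(closed, gone)
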
    async def invalidate_label_cache(self) -> None:
        """Manually invalidate the label cache.

        Useful when labels are modified externally or when you need
        to force a refresh of cached label data.
        """
        await self._labels_cache.clear()

    # =============================================================================
    # GitHub Projects V2 Issue Operations (Week 3)
    # =============================================================================

async def project_add_issue(
|
|
2656
|
+
self,
|
|
2657
|
+
project_id: str,
|
|
2658
|
+
issue_id: str,
|
|
2659
|
+
) -> bool:
|
|
2660
|
+
"""Add an issue to a GitHub Projects V2 project.
|
|
2661
|
+
|
|
2662
|
+
Args:
|
|
2663
|
+
----
|
|
2664
|
+
project_id: Project node ID (PVT_kwDOABCD...)
|
|
2665
|
+
issue_id: Issue node ID (I_kwDOABCD...) or issue number with owner/repo
|
|
2666
|
+
|
|
2667
|
+
Returns:
|
|
2668
|
+
-------
|
|
2669
|
+
True if issue was added successfully
|
|
2670
|
+
|
|
2671
|
+
Raises:
|
|
2672
|
+
------
|
|
2673
|
+
ValueError: If project_id or issue_id is invalid
|
|
2674
|
+
RuntimeError: If GraphQL mutation fails
|
|
2675
|
+
|
|
2676
|
+
Example:
|
|
2677
|
+
-------
|
|
2678
|
+
# Add by issue node ID
|
|
2679
|
+
success = await adapter.project_add_issue(
|
|
2680
|
+
project_id="PVT_kwDOABCD1234",
|
|
2681
|
+
issue_id="I_kwDOABCD5678"
|
|
2682
|
+
)
|
|
2683
|
+
|
|
2684
|
+
# Add by issue number (requires owner/repo context)
|
|
2685
|
+
success = await adapter.project_add_issue(
|
|
2686
|
+
project_id="PVT_kwDOABCD1234",
|
|
2687
|
+
issue_id="owner/repo#123"
|
|
2688
|
+
)
|
|
2689
|
+
|
|
2690
|
+
Note:
|
|
2691
|
+
----
|
|
2692
|
+
GitHub's addProjectV2ItemById mutation requires:
|
|
2693
|
+
- projectId: Project node ID
|
|
2694
|
+
- contentId: Issue/PR node ID (not item ID)
|
|
2695
|
+
|
|
2696
|
+
"""
|
|
2697
|
+
# Validate project_id format
|
|
2698
|
+
if not project_id or not project_id.startswith("PVT_"):
|
|
2699
|
+
raise ValueError(
|
|
2700
|
+
f"Invalid project_id: {project_id}. "
|
|
2701
|
+
"Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
|
|
2702
|
+
)
|
|
2703
|
+
|
|
2704
|
+
# Validate issue_id is provided
|
|
2705
|
+
if not issue_id:
|
|
2706
|
+
raise ValueError("issue_id is required")
|
|
2707
|
+
|
|
2708
|
+
+        # If issue_id is in "owner/repo#number" format, resolve to node ID
+        content_id = issue_id
+        if "#" in issue_id and "/" in issue_id:
+            # Parse owner/repo#number format
+            try:
+                repo_part, number_str = issue_id.rsplit("#", 1)
+                owner, repo = repo_part.split("/")
+                issue_number = int(number_str)
+
+                # Query GitHub to get issue node ID
+                issue_query = """
+                query GetIssueNodeId($owner: String!, $repo: String!, $number: Int!) {
+                    repository(owner: $owner, name: $repo) {
+                        issue(number: $number) {
+                            id
+                        }
+                    }
+                }
+                """
+
+                result = await self._graphql_request(
+                    issue_query,
+                    {"owner": owner, "repo": repo, "number": issue_number},
+                )
+
+                repo_data = result.get("repository")
+                if not repo_data:
+                    raise ValueError(f"Repository {owner}/{repo} not found")
+
+                issue_data = repo_data.get("issue")
+                if not issue_data:
+                    raise ValueError(
+                        f"Issue #{issue_number} not found in {owner}/{repo}"
+                    )
+
+                content_id = issue_data["id"]
+                logger.debug(f"Resolved issue {issue_id} to node ID {content_id}")
+
+            except ValueError:
+                # Re-raise ValueError as-is (already has good message)
+                raise
+            except (KeyError, TypeError) as e:
+                raise ValueError(
+                    f"Invalid issue_id format: {issue_id}. "
+                    "Expected 'owner/repo#number' or issue node ID (I_kwDO...)"
+                ) from e
+
+        # Validate issue node ID format
+        if not content_id.startswith("I_") and not content_id.startswith("PR_"):
+            raise ValueError(
+                f"Invalid issue_id: {content_id}. "
+                "Issue ID must start with 'I_' or 'PR_' (e.g., I_kwDOABCD5678)"
+            )
+
+        try:
+            # Execute ADD_PROJECT_ITEM_MUTATION
+            from .queries import ADD_PROJECT_ITEM_MUTATION
+
+            data = await self.gh_client.execute_graphql(
+                query=ADD_PROJECT_ITEM_MUTATION,
+                variables={
+                    "projectId": project_id,
+                    "contentId": content_id,
+                },
+            )
+
+            # Check for successful addition
+            add_result = data.get("addProjectV2ItemById", {})
+            item_data = add_result.get("item")
+
+            if item_data:
+                logger.info(
+                    f"Successfully added issue {issue_id} to project {project_id}"
+                )
+                return True
+            else:
+                logger.warning(
+                    f"Failed to add issue {issue_id} to project {project_id}: No item returned"
+                )
+                return False
+
+        except Exception as e:
+            error_msg = str(e).lower()
+
+            # Handle "already exists" errors gracefully
+            if "already exists" in error_msg or "duplicate" in error_msg:
+                logger.info(f"Issue {issue_id} already exists in project {project_id}")
+                return True
+
+            # Log and re-raise other errors
+            logger.error(f"Failed to add issue {issue_id} to project {project_id}: {e}")
+            raise RuntimeError(f"Failed to add issue to project: {e}") from e
+
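The block above accepts two forms of issue_id: a human-readable "owner/repo#number" reference, which is first resolved to a GraphQL node ID, and a raw node ID, which only has to pass the "I_"/"PR_" prefix check before the addProjectV2ItemById mutation runs. A minimal usage sketch (editorial illustration, not part of this diff): it assumes `adapter` is an already-configured GitHubAdapter and refers to the enclosing method as `project_add_issue` by analogy with `project_remove_issue` below; the real signature and keyword names appear earlier in this file.

async def add_both_forms(adapter) -> None:
    # Human-readable reference: resolved to an issue node ID via a GraphQL lookup first.
    await adapter.project_add_issue(
        project_id="PVT_kwDOABCD1234",
        issue_id="octocat/hello-world#42",
    )
    # Raw node ID: passed through after the "I_"/"PR_" prefix validation.
    await adapter.project_add_issue(
        project_id="PVT_kwDOABCD1234",
        issue_id="I_kwDOABCD5678",
    )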
+    async def project_remove_issue(
+        self,
+        project_id: str,
+        item_id: str,
+    ) -> bool:
+        """Remove an issue from a GitHub Projects V2 project.
+
+        Args:
+        ----
+            project_id: Project node ID (PVT_kwDOABCD...)
+            item_id: Project item ID (PVTI_kwDOABCD...) NOT issue ID
+
+        Returns:
+        -------
+            True if issue was removed successfully
+
+        Raises:
+        ------
+            ValueError: If project_id or item_id is invalid
+            RuntimeError: If GraphQL mutation fails
+
+        Example:
+        -------
+            success = await adapter.project_remove_issue(
+                project_id="PVT_kwDOABCD1234",
+                item_id="PVTI_kwDOABCD5678"
+            )
+
+        Note:
+        ----
+            Requires the project ITEM ID (PVTI_*), not the issue ID (I_*).
+            Use project_get_issues() to find the item ID for an issue.
+
+        """
+        # Validate project_id format
+        if not project_id or not project_id.startswith("PVT_"):
+            raise ValueError(
+                f"Invalid project_id: {project_id}. "
+                "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
+            )
+
+        # Validate item_id format
+        if not item_id or not item_id.startswith("PVTI_"):
+            raise ValueError(
+                f"Invalid item_id: {item_id}. "
+                "Item ID must start with 'PVTI_' (e.g., PVTI_kwDOABCD5678). "
+                "Note: This is the project item ID, not the issue ID. "
+                "Use project_get_issues() to get the item ID for an issue."
+            )
+
+        try:
+            # Execute REMOVE_PROJECT_ITEM_MUTATION
+            from .queries import REMOVE_PROJECT_ITEM_MUTATION
+
+            data = await self.gh_client.execute_graphql(
+                query=REMOVE_PROJECT_ITEM_MUTATION,
+                variables={
+                    "projectId": project_id,
+                    "itemId": item_id,
+                },
+            )
+
+            # Check for successful removal
+            delete_result = data.get("deleteProjectV2Item", {})
+            deleted_item_id = delete_result.get("deletedItemId")
+
+            if deleted_item_id:
+                logger.info(
+                    f"Successfully removed item {item_id} from project {project_id}"
+                )
+                return True
+            else:
+                logger.warning(
+                    f"Failed to remove item {item_id} from project {project_id}: "
+                    "No deleted item ID returned"
+                )
+                return False
+
+        except Exception as e:
+            error_msg = str(e).lower()
+
+            # Handle "not found" errors gracefully
+            if "not found" in error_msg or "does not exist" in error_msg:
+                logger.warning(
+                    f"Item {item_id} not found in project {project_id} "
+                    "(may have been already removed)"
+                )
+                return False
+
+            # Log and re-raise other errors
+            logger.error(
+                f"Failed to remove item {item_id} from project {project_id}: {e}"
+            )
+            raise RuntimeError(f"Failed to remove issue from project: {e}") from e
+
+    async def project_get_issues(
+        self,
+        project_id: str,
+        state: str | None = None,
+        limit: int = 10,
+        cursor: str | None = None,
+    ) -> list[Task]:
+        """Get issues in a GitHub Projects V2 project.
+
+        Args:
+        ----
+            project_id: Project node ID (PVT_kwDOABCD...)
+            state: Filter by issue state ("OPEN", "CLOSED", None for all)
+            limit: Maximum number of issues to return (default 10)
+            cursor: Pagination cursor for next page
+
+        Returns:
+        -------
+            List of Task objects representing issues in the project
+
+        Raises:
+        ------
+            ValueError: If project_id is invalid
+            RuntimeError: If GraphQL query fails
+
+        Example:
+        -------
+            # Get all open issues
+            issues = await adapter.project_get_issues(
+                project_id="PVT_kwDOABCD1234",
+                state="OPEN",
+                limit=20
+            )
+
+            # Get next page
+            issues = await adapter.project_get_issues(
+                project_id="PVT_kwDOABCD1234",
+                cursor=last_cursor
+            )
+
+        Note:
+        ----
+            Returns Task objects with additional project context under
+            task.metadata["github"]:
+            - task.metadata["github"]["project_item_id"]: item ID for removal operations
+            - task.metadata["github"]["project_id"]: project node ID
+
+        """
+        # Validate project_id format
+        if not project_id or not project_id.startswith("PVT_"):
+            raise ValueError(
+                f"Invalid project_id: {project_id}. "
+                "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
+            )
+
+        try:
+            # Execute PROJECT_ITEMS_QUERY
+            from .queries import PROJECT_ITEMS_QUERY
+
+            data = await self.gh_client.execute_graphql(
+                query=PROJECT_ITEMS_QUERY,
+                variables={
+                    "projectId": project_id,
+                    "first": limit,
+                    "after": cursor,
+                },
+            )
+
+            # Parse response and extract items array
+            project_node = data.get("node")
+            if not project_node:
+                logger.warning(f"Project {project_id} not found")
+                return []
+
+            items_data = project_node.get("items", {})
+            item_nodes = items_data.get("nodes", [])
+
+            # Filter items by content type (only "Issue", skip "PullRequest", "DraftIssue")
+            tasks = []
+            for item in item_nodes:
+                content = item.get("content")
+                if not content:
+                    # Skip archived items without content
+                    logger.debug(f"Skipping item {item.get('id')} without content")
+                    continue
+
+                content_type = content.get("__typename")
+
+                # Only process Issues
+                if content_type != "Issue":
+                    logger.debug(f"Skipping {content_type} item {item.get('id')}")
+                    continue
+
+                # Map GitHub issue to Task using existing mapper
+                from .mappers import map_github_issue_to_task
+
+                # Convert GraphQL format to format expected by mapper
+                issue_dict = {
+                    "number": content.get("number"),
+                    "title": content.get("title"),
+                    "state": content.get("state", "").lower(),
+                    "labels": content.get("labels", {}),
+                    # Note: PROJECT_ITEMS_QUERY doesn't include all issue fields
+                    # Only basic fields are available
+                }
+
+                task = map_github_issue_to_task(issue_dict, self.custom_priority_scheme)
+
+                # Add project context to metadata
+                if "github" not in task.metadata:
+                    task.metadata["github"] = {}
+
+                task.metadata["github"]["project_item_id"] = item["id"]
+                task.metadata["github"]["project_id"] = project_id
+
+                # Extract project number from project_id if needed
+                # Project node ID format: PVT_kwDO... but we don't have number here
+                # We'll need to query the project separately or store it
+
+                tasks.append(task)
+
+            # Filter by state if provided (post-query filtering)
+            if state:
+                state_lower = state.lower()
+                tasks = [
+                    task
+                    for task in tasks
+                    if (isinstance(task.state, str) and task.state == state_lower)
+                    or (
+                        hasattr(task.state, "value") and task.state.value == state_lower
+                    )
+                    or (
+                        state_lower == "open"
+                        and (
+                            (
+                                isinstance(task.state, str)
+                                and task.state
+                                in ["open", "in_progress", "blocked", "waiting"]
+                            )
+                            or (
+                                hasattr(task.state, "value")
+                                and task.state.value
+                                in ["open", "in_progress", "blocked", "waiting"]
+                            )
+                        )
+                    )
+                    or (
+                        state_lower == "closed"
+                        and (
+                            (
+                                isinstance(task.state, str)
+                                and task.state in ["done", "closed"]
+                            )
+                            or (
+                                hasattr(task.state, "value")
+                                and task.state.value in ["done", "closed"]
+                            )
+                        )
+                    )
+                ]
+
+            logger.info(
+                f"Retrieved {len(tasks)} issues from project {project_id} "
+                f"(filtered by state={state})"
+            )
+
+            return tasks
+
+        except Exception as e:
+            logger.error(f"Failed to get issues from project {project_id}: {e}")
+            raise RuntimeError(f"Failed to get project issues: {e}") from e
+
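project_get_issues returns Task objects whose task.metadata["github"]["project_item_id"] carries the PVTI_* item ID that project_remove_issue expects, while I_* issue node IDs are only accepted by the add path. A short sketch tying the two together (editorial illustration, not part of this diff; assumes `adapter` is a configured GitHubAdapter and uses a placeholder project ID):

async def remove_first_open_item(adapter, project_id: str = "PVT_kwDOABCD1234") -> bool:
    tasks = await adapter.project_get_issues(
        project_id=project_id,
        state="OPEN",
        limit=20,
    )
    if not tasks:
        return False
    # Removal needs the project item ID (PVTI_*), not the issue node ID (I_*);
    # project_get_issues stores it under the "github" metadata key.
    item_id = tasks[0].metadata["github"]["project_item_id"]
    return await adapter.project_remove_issue(
        project_id=project_id,
        item_id=item_id,
    )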
+    async def project_get_statistics(
+        self,
+        project_id: str,
+    ) -> ProjectStatistics:
+        """Get comprehensive statistics for a GitHub Projects V2 project.
+
+        Calculates issue state breakdown, priority distribution, and health status
+        by analyzing all issues in the project. Priority is determined from issue
+        labels (priority:low, priority/medium, etc.), and blocked status is detected
+        from "blocked" or "blocker" labels.
+
+        Health Scoring Logic:
+        - on_track: >70% complete AND <10% blocked
+        - at_risk: >40% complete AND <30% blocked
+        - off_track: Otherwise (low completion or high blocked rate)
+
+        Args:
+        ----
+            project_id: Project node ID (PVT_kwDOABCD...)
+
+        Returns:
+        -------
+            ProjectStatistics with metrics and health scoring
+
+        Raises:
+        ------
+            ValueError: If project_id is invalid format
+            RuntimeError: If statistics calculation fails
+
+        Example:
+        -------
+            stats = await adapter.project_get_statistics("PVT_kwDOABCD1234")
+            print(f"Health: {stats.health}, Progress: {stats.progress_percentage}%")
+            print(f"Priority breakdown: H={stats.priority_high_count}, "
+                  f"M={stats.priority_medium_count}")
+
+        Note:
+        ----
+            Fetches up to 1000 issues for reasonable performance. For projects
+            with >1000 issues, statistics may be based on a sample.
+
+        """
+        from ...core.models import ProjectStatistics
+
+        # Validate project_id format
+        if not project_id or not project_id.startswith("PVT_"):
+            raise ValueError(
+                f"Invalid project_id: {project_id}. "
+                "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
+            )
+
+        logger.debug(f"Calculating statistics for project {project_id}")
+
+        try:
+            # Fetch all issues (limit 1000 for reasonable performance)
+            issues = await self.project_get_issues(project_id=project_id, limit=1000)
+        except Exception as e:
+            logger.error(f"Failed to fetch issues for statistics: {e}")
+            raise RuntimeError(f"Failed to calculate project statistics: {e}") from e
+
+        # Calculate basic counts
+        total = len(issues)
+        open_count = 0
+        closed_count = 0
+        in_progress_count = 0
+
+        # Count by priority (from labels)
+        priority_counts = {"low": 0, "medium": 0, "high": 0, "critical": 0}
+        blocked_count = 0
+
+        for issue in issues:
+            # Count by state (GitHub only has OPEN/CLOSED)
+            # We map based on state enum value
+            state_value = (
+                issue.state.value if hasattr(issue.state, "value") else str(issue.state)
+            )
+
+            if state_value in ["open", "in_progress", "blocked", "waiting"]:
+                if state_value == "in_progress":
+                    in_progress_count += 1
+                else:
+                    open_count += 1
+            elif state_value in ["done", "closed"]:
+                closed_count += 1
+            else:
+                # Default unrecognized states to open
+                open_count += 1
+
+            # Check tags (labels) for priority and blocked status
+            for tag in issue.tags:
+                tag_lower = tag.lower()
+
+                # Priority detection (priority:high, priority/low, etc.)
+                if "priority:" in tag_lower or "priority/" in tag_lower:
+                    # Extract priority level
+                    priority = (
+                        tag_lower.replace("priority:", "")
+                        .replace("priority/", "")
+                        .strip()
+                    )
+                    if priority in priority_counts:
+                        priority_counts[priority] += 1
+                    elif "crit" in priority or "p0" in priority:
+                        priority_counts["critical"] += 1
+                    elif "high" in priority or "p1" in priority:
+                        priority_counts["high"] += 1
+                    elif "med" in priority or "p2" in priority:
+                        priority_counts["medium"] += 1
+                    elif "low" in priority or "p3" in priority:
+                        priority_counts["low"] += 1
+
+                # Blocked detection
+                if "blocked" in tag_lower or "blocker" in tag_lower:
+                    blocked_count += 1
+
+        # Calculate health and progress
+        if total == 0:
+            health = "on_track"
+            progress_pct = 0.0
+        else:
+            completed_pct = (closed_count / total) * 100
+            blocked_pct = (blocked_count / total) * 100
+
+            # Health scoring logic
+            if completed_pct > 70 and blocked_pct < 10:
+                health = "on_track"
+            elif completed_pct > 40 and blocked_pct < 30:
+                health = "at_risk"
+            else:
+                health = "off_track"
+
+            progress_pct = completed_pct
+
+        # Create statistics object
+        stats = ProjectStatistics(
+            total_count=total,
+            open_count=open_count,
+            in_progress_count=in_progress_count,
+            completed_count=closed_count,
+            blocked_count=blocked_count,
+            priority_low_count=priority_counts["low"],
+            priority_medium_count=priority_counts["medium"],
+            priority_high_count=priority_counts["high"],
+            priority_critical_count=priority_counts["critical"],
+            health=health,
+            progress_percentage=round(progress_pct, 1),
+        )
+
+        logger.info(
+            f"Statistics for {project_id}: {total} issues, "
+            f"{health} health, {progress_pct:.1f}% complete, "
+            f"{blocked_count} blocked"
+        )
+
+        return stats
+
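The health thresholds in project_get_statistics reduce to a small pure function of the completion and blocked percentages. The sketch below is an editorial restatement for illustration, not code from the package: 15 of 20 issues closed with 1 blocked (75% / 5%) scores on_track, 9 of 20 closed with 5 blocked (45% / 25%) scores at_risk, and anything below those bands scores off_track.

def score_health(total: int, closed: int, blocked: int) -> str:
    """Reproduce the on_track / at_risk / off_track thresholds shown above."""
    if total == 0:
        return "on_track"
    completed_pct = closed / total * 100
    blocked_pct = blocked / total * 100
    if completed_pct > 70 and blocked_pct < 10:
        return "on_track"
    if completed_pct > 40 and blocked_pct < 30:
        return "at_risk"
    return "off_track"

assert score_health(20, 15, 1) == "on_track"   # 75% complete, 5% blocked
assert score_health(20, 9, 5) == "at_risk"     # 45% complete, 25% blocked
assert score_health(20, 4, 8) == "off_track"   # 20% complete, 40% blocked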
+    async def close(self) -> None:
+        """Close the HTTP client connection."""
+        await self.client.aclose()
+
+
+# Register the adapter
+AdapterRegistry.register("github", GitHubAdapter)