mcp-ticketer 2.0.1__py3-none-any.whl → 2.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mcp-ticketer might be problematic.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/_version_scm.py +1 -0
- mcp_ticketer/adapters/aitrackdown.py +122 -0
- mcp_ticketer/adapters/asana/adapter.py +121 -0
- mcp_ticketer/adapters/github/__init__.py +26 -0
- mcp_ticketer/adapters/{github.py → github/adapter.py} +1506 -365
- mcp_ticketer/adapters/github/client.py +335 -0
- mcp_ticketer/adapters/github/mappers.py +797 -0
- mcp_ticketer/adapters/github/queries.py +692 -0
- mcp_ticketer/adapters/github/types.py +460 -0
- mcp_ticketer/adapters/jira/__init__.py +35 -0
- mcp_ticketer/adapters/{jira.py → jira/adapter.py} +250 -678
- mcp_ticketer/adapters/jira/client.py +271 -0
- mcp_ticketer/adapters/jira/mappers.py +246 -0
- mcp_ticketer/adapters/jira/queries.py +216 -0
- mcp_ticketer/adapters/jira/types.py +304 -0
- mcp_ticketer/adapters/linear/adapter.py +1000 -92
- mcp_ticketer/adapters/linear/client.py +91 -1
- mcp_ticketer/adapters/linear/mappers.py +107 -0
- mcp_ticketer/adapters/linear/queries.py +112 -2
- mcp_ticketer/adapters/linear/types.py +50 -10
- mcp_ticketer/cli/configure.py +524 -89
- mcp_ticketer/cli/install_mcp_server.py +418 -0
- mcp_ticketer/cli/main.py +10 -0
- mcp_ticketer/cli/mcp_configure.py +177 -49
- mcp_ticketer/cli/platform_installer.py +9 -0
- mcp_ticketer/cli/setup_command.py +157 -1
- mcp_ticketer/cli/ticket_commands.py +443 -81
- mcp_ticketer/cli/utils.py +113 -0
- mcp_ticketer/core/__init__.py +28 -0
- mcp_ticketer/core/adapter.py +367 -1
- mcp_ticketer/core/milestone_manager.py +252 -0
- mcp_ticketer/core/models.py +345 -0
- mcp_ticketer/core/project_utils.py +281 -0
- mcp_ticketer/core/project_validator.py +376 -0
- mcp_ticketer/core/session_state.py +6 -1
- mcp_ticketer/core/state_matcher.py +36 -3
- mcp_ticketer/mcp/server/__main__.py +2 -1
- mcp_ticketer/mcp/server/routing.py +68 -0
- mcp_ticketer/mcp/server/tools/__init__.py +7 -4
- mcp_ticketer/mcp/server/tools/attachment_tools.py +3 -1
- mcp_ticketer/mcp/server/tools/config_tools.py +233 -35
- mcp_ticketer/mcp/server/tools/milestone_tools.py +338 -0
- mcp_ticketer/mcp/server/tools/search_tools.py +30 -1
- mcp_ticketer/mcp/server/tools/ticket_tools.py +37 -1
- mcp_ticketer/queue/queue.py +68 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/METADATA +33 -3
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/RECORD +72 -36
- mcp_ticketer-2.2.13.dist-info/top_level.txt +2 -0
- py_mcp_installer/examples/phase3_demo.py +178 -0
- py_mcp_installer/scripts/manage_version.py +54 -0
- py_mcp_installer/setup.py +6 -0
- py_mcp_installer/src/py_mcp_installer/__init__.py +153 -0
- py_mcp_installer/src/py_mcp_installer/command_builder.py +445 -0
- py_mcp_installer/src/py_mcp_installer/config_manager.py +541 -0
- py_mcp_installer/src/py_mcp_installer/exceptions.py +243 -0
- py_mcp_installer/src/py_mcp_installer/installation_strategy.py +617 -0
- py_mcp_installer/src/py_mcp_installer/installer.py +656 -0
- py_mcp_installer/src/py_mcp_installer/mcp_inspector.py +750 -0
- py_mcp_installer/src/py_mcp_installer/platform_detector.py +451 -0
- py_mcp_installer/src/py_mcp_installer/platforms/__init__.py +26 -0
- py_mcp_installer/src/py_mcp_installer/platforms/claude_code.py +225 -0
- py_mcp_installer/src/py_mcp_installer/platforms/codex.py +181 -0
- py_mcp_installer/src/py_mcp_installer/platforms/cursor.py +191 -0
- py_mcp_installer/src/py_mcp_installer/types.py +222 -0
- py_mcp_installer/src/py_mcp_installer/utils.py +463 -0
- py_mcp_installer/tests/__init__.py +0 -0
- py_mcp_installer/tests/platforms/__init__.py +0 -0
- py_mcp_installer/tests/test_platform_detector.py +17 -0
- mcp_ticketer-2.0.1.dist-info/top_level.txt +0 -1
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-2.0.1.dist-info → mcp_ticketer-2.2.13.dist-info}/licenses/LICENSE +0 -0
mcp_ticketer/adapters/{github.py → github/adapter.py}

@@ -1,163 +1,63 @@
 """GitHub adapter implementation using REST API v3 and GraphQL API v4."""
 
+from __future__ import annotations
+
 import builtins
 import logging
 import re
-from datetime import datetime
+from datetime import date, datetime
 from pathlib import Path
 from typing import Any
 
 import httpx
 
-from …
-from …
-from …
-from …
+from ...cache.memory import MemoryCache
+from ...core.adapter import BaseAdapter
+from ...core.env_loader import load_adapter_config, validate_adapter_config
+from ...core.models import (
+    Comment,
+    Epic,
+    Milestone,
+    Priority,
+    Project,
+    ProjectScope,
+    ProjectState,
+    ProjectStatistics,
+    SearchQuery,
+    Task,
+    TicketState,
+)
+from ...core.registry import AdapterRegistry
+from .client import GitHubClient
+from .mappers import (
+    map_github_issue_to_task,
+    map_github_milestone_to_epic,
+    map_github_milestone_to_milestone,
+    map_github_projectv2_to_project,
+)
+from .queries import (
+    CREATE_PROJECT_MUTATION,
+    DELETE_PROJECT_MUTATION,
+    GET_PROJECT_BY_ID_QUERY,
+    GET_PROJECT_ITERATIONS,
+    GET_PROJECT_QUERY,
+    ISSUE_FRAGMENT,
+    LIST_PROJECTS_QUERY,
+    SEARCH_ISSUES,
+    UPDATE_PROJECT_MUTATION,
+)
+from .types import (
+    GitHubStateMapping,
+    extract_state_from_issue,
+    get_github_state,
+    get_priority_from_labels,
+    get_priority_label,
+    get_state_label,
+)
 
 logger = logging.getLogger(__name__)
 
 
-class GitHubStateMapping:
-    """GitHub issue states and label-based extended states."""
-
-    # GitHub native states
-    OPEN = "open"
-    CLOSED = "closed"
-
-    # Extended states via labels
-    STATE_LABELS = {
-        TicketState.IN_PROGRESS: "in-progress",
-        TicketState.READY: "ready",
-        TicketState.TESTED: "tested",
-        TicketState.WAITING: "waiting",
-        TicketState.BLOCKED: "blocked",
-    }
-
-    # Priority labels
-    PRIORITY_LABELS = {
-        Priority.CRITICAL: ["P0", "critical", "urgent"],
-        Priority.HIGH: ["P1", "high"],
-        Priority.MEDIUM: ["P2", "medium"],
-        Priority.LOW: ["P3", "low"],
-    }
-
-
-class GitHubGraphQLQueries:
-    """GraphQL queries for GitHub API v4."""
-
-    ISSUE_FRAGMENT = """
-    fragment IssueFields on Issue {
-        id
-        number
-        title
-        body
-        state
-        createdAt
-        updatedAt
-        url
-        author {
-            login
-        }
-        assignees(first: 10) {
-            nodes {
-                login
-                email
-            }
-        }
-        labels(first: 20) {
-            nodes {
-                name
-                color
-            }
-        }
-        milestone {
-            id
-            number
-            title
-            state
-            description
-        }
-        projectCards(first: 10) {
-            nodes {
-                project {
-                    name
-                    url
-                }
-                column {
-                    name
-                }
-            }
-        }
-        comments(first: 100) {
-            nodes {
-                id
-                body
-                author {
-                    login
-                }
-                createdAt
-            }
-        }
-        reactions(first: 10) {
-            nodes {
-                content
-                user {
-                    login
-                }
-            }
-        }
-    }
-    """
-
-    GET_ISSUE = """
-    query GetIssue($owner: String!, $repo: String!, $number: Int!) {
-        repository(owner: $owner, name: $repo) {
-            issue(number: $number) {
-                ...IssueFields
-            }
-        }
-    }
-    """
-
-    SEARCH_ISSUES = """
-    query SearchIssues($query: String!, $first: Int!, $after: String) {
-        search(query: $query, type: ISSUE, first: $first, after: $after) {
-            issueCount
-            pageInfo {
-                hasNextPage
-                endCursor
-            }
-            nodes {
-                ... on Issue {
-                    ...IssueFields
-                }
-            }
-        }
-    }
-    """
-
-    GET_PROJECT_ITERATIONS = """
-    query GetProjectIterations($projectId: ID!, $first: Int!, $after: String) {
-        node(id: $projectId) {
-            ... on ProjectV2 {
-                iterations(first: $first, after: $after) {
-                    nodes {
-                        id
-                        title
-                        startDate
-                        duration
-                    }
-                    pageInfo {
-                        hasNextPage
-                        endCursor
-                    }
-                }
-            }
-        }
-    }
-    """
-
-
 class GitHubAdapter(BaseAdapter[Task]):
     """Adapter for GitHub Issues tracking system."""
 
@@ -173,6 +73,7 @@ class GitHubAdapter(BaseAdapter[Task]):
             - api_url: Optional API URL for GitHub Enterprise
             - use_projects_v2: Enable Projects v2 (default: False)
             - custom_priority_scheme: Custom priority label mapping
+            - labels_ttl: Label cache TTL in seconds (default: 300.0)
 
         """
         super().__init__(config)
@@ -211,23 +112,25 @@ class GitHubAdapter(BaseAdapter[Task]):
         self.use_projects_v2 = config.get("use_projects_v2", False)
         self.custom_priority_scheme = config.get("custom_priority_scheme", {})
 
-        # …
-        self. …
-
-
-
-
-
-        self.client = httpx.AsyncClient(
-            base_url=self.api_url,
-            headers=self.headers,
+        # Initialize GitHub API client
+        self.gh_client = GitHubClient(
+            token=self.token,
+            owner=self.owner,
+            repo=self.repo,
+            api_url=self.api_url,
             timeout=30.0,
         )
 
-        # …
-
+        # Keep legacy client reference for backward compatibility
+        # TODO: Gradually migrate all direct self.client usage to self.gh_client
+        self.client = self.gh_client.client
+        self.headers = self.gh_client.headers
+        self.graphql_url = self.gh_client.graphql_url
+
+        # Initialize TTL-based cache
+        self._labels_ttl = config.get("labels_ttl", 300.0)  # 5 min default
+        self._labels_cache = MemoryCache(default_ttl=self._labels_ttl)
         self._milestones_cache: list[dict[str, Any]] | None = None
-        self._rate_limit: dict[str, Any] = {}
 
     def validate_credentials(self) -> tuple[bool, str]:
         """Validate that required credentials are present.
@@ -259,225 +162,48 @@ class GitHubAdapter(BaseAdapter[Task]):
         return True, ""
 
     def _get_state_mapping(self) -> dict[TicketState, str]:
-        """Map universal states to GitHub states."""
-        return {
-            TicketState.OPEN: GitHubStateMapping.OPEN,
-            TicketState.IN_PROGRESS: GitHubStateMapping.OPEN,  # with label
-            TicketState.READY: GitHubStateMapping.OPEN,  # with label
-            TicketState.TESTED: GitHubStateMapping.OPEN,  # with label
-            TicketState.DONE: GitHubStateMapping.CLOSED,
-            TicketState.WAITING: GitHubStateMapping.OPEN,  # with label
-            TicketState.BLOCKED: GitHubStateMapping.OPEN,  # with label
-            TicketState.CLOSED: GitHubStateMapping.CLOSED,
-        }
+        """Map universal states to GitHub states (delegated to types module)."""
+        return {state: get_github_state(state) for state in TicketState}
 
     def _get_state_label(self, state: TicketState) -> str | None:
-        """Get the label name for extended states."""
-        return …
+        """Get the label name for extended states (delegated to types module)."""
+        return get_state_label(state)
 
     def _get_priority_from_labels(self, labels: list[str]) -> Priority:
-        """Extract priority from issue labels."""
-
-
-        # Check custom priority scheme first
-        if self.custom_priority_scheme:
-            for priority_str, label_patterns in self.custom_priority_scheme.items():
-                for pattern in label_patterns:
-                    if any(pattern.lower() in label for label in label_names):
-                        return Priority(priority_str)
-
-        # Check default priority labels
-        for priority, priority_labels in GitHubStateMapping.PRIORITY_LABELS.items():
-            for priority_label in priority_labels:
-                if priority_label.lower() in label_names:
-                    return priority
-
-        return Priority.MEDIUM
+        """Extract priority from issue labels (delegated to types module)."""
+        return get_priority_from_labels(labels, self.custom_priority_scheme)
 
     def _get_priority_label(self, priority: Priority) -> str:
-        """Get label name for a priority level."""
-
-        if self.custom_priority_scheme:
-            labels = self.custom_priority_scheme.get(priority.value, [])
-            if labels:
-                return labels[0]
-
-        # Use default labels
-        labels = GitHubStateMapping.PRIORITY_LABELS.get(priority, [])
-        return (
-            labels[0]
-            if labels
-            else f"P{['0', '1', '2', '3'][list(Priority).index(priority)]}"
-        )
+        """Get label name for a priority level (delegated to types module)."""
+        return get_priority_label(priority, self.custom_priority_scheme)
 
     def _milestone_to_epic(self, milestone: dict[str, Any]) -> Epic:
-        """Convert GitHub milestone to Epic model.
-
-        Args:
-        ----
-            milestone: GitHub milestone data
-
-        Returns:
-        -------
-            Epic instance
-
-        """
-        return Epic(
-            id=str(milestone["number"]),
-            title=milestone["title"],
-            description=milestone.get("description", ""),
-            state=(
-                TicketState.OPEN if milestone["state"] == "open" else TicketState.CLOSED
-            ),
-            created_at=datetime.fromisoformat(
-                milestone["created_at"].replace("Z", "+00:00")
-            ),
-            updated_at=datetime.fromisoformat(
-                milestone["updated_at"].replace("Z", "+00:00")
-            ),
-            metadata={
-                "github": {
-                    "number": milestone["number"],
-                    "url": milestone.get("html_url"),
-                    "open_issues": milestone.get("open_issues", 0),
-                    "closed_issues": milestone.get("closed_issues", 0),
-                }
-            },
-        )
+        """Convert GitHub milestone to Epic model (delegated to mappers module)."""
+        return map_github_milestone_to_epic(milestone)
 
     def _extract_state_from_issue(self, issue: dict[str, Any]) -> TicketState:
-        """Extract ticket state from GitHub issue data."""
-
-        if issue["state"] == "closed":
-            return TicketState.CLOSED
-
-        # Check labels for extended states
-        labels = []
-        if "labels" in issue:
-            if isinstance(issue["labels"], list):
-                labels = [
-                    label.get("name", "") if isinstance(label, dict) else str(label)
-                    for label in issue["labels"]
-                ]
-            elif isinstance(issue["labels"], dict) and "nodes" in issue["labels"]:
-                labels = [label["name"] for label in issue["labels"]["nodes"]]
-
-        label_names = [label.lower() for label in labels]
-
-        # Check for extended state labels
-        for state, label_name in GitHubStateMapping.STATE_LABELS.items():
-            if label_name.lower() in label_names:
-                return state
-
-        return TicketState.OPEN
+        """Extract ticket state from GitHub issue data (delegated to types module)."""
+        return extract_state_from_issue(issue)
 
     def _task_from_github_issue(self, issue: dict[str, Any]) -> Task:
-        """Convert GitHub issue to universal Task."""
-
-        labels = []
-        if "labels" in issue:
-            if isinstance(issue["labels"], list):
-                labels = [
-                    label.get("name", "") if isinstance(label, dict) else str(label)
-                    for label in issue["labels"]
-                ]
-            elif isinstance(issue["labels"], dict) and "nodes" in issue["labels"]:
-                labels = [label["name"] for label in issue["labels"]["nodes"]]
-
-        # Extract state
-        state = self._extract_state_from_issue(issue)
-
-        # Extract priority
-        priority = self._get_priority_from_labels(labels)
-
-        # Extract assignee
-        assignee = None
-        if "assignees" in issue:
-            if isinstance(issue["assignees"], list) and issue["assignees"]:
-                assignee = issue["assignees"][0].get("login")
-            elif isinstance(issue["assignees"], dict) and "nodes" in issue["assignees"]:
-                nodes = issue["assignees"]["nodes"]
-                if nodes:
-                    assignee = nodes[0].get("login")
-        elif "assignee" in issue and issue["assignee"]:
-            assignee = issue["assignee"].get("login")
-
-        # Extract parent epic (milestone)
-        parent_epic = None
-        if issue.get("milestone"):
-            parent_epic = str(issue["milestone"]["number"])
-
-        # Parse dates
-        created_at = None
-        if issue.get("created_at"):
-            created_at = datetime.fromisoformat(
-                issue["created_at"].replace("Z", "+00:00")
-            )
-        elif issue.get("createdAt"):
-            created_at = datetime.fromisoformat(
-                issue["createdAt"].replace("Z", "+00:00")
-            )
-
-        updated_at = None
-        if issue.get("updated_at"):
-            updated_at = datetime.fromisoformat(
-                issue["updated_at"].replace("Z", "+00:00")
-            )
-        elif issue.get("updatedAt"):
-            updated_at = datetime.fromisoformat(
-                issue["updatedAt"].replace("Z", "+00:00")
-            )
-
-        # Build metadata
-        metadata = {
-            "github": {
-                "number": issue.get("number"),
-                "url": issue.get("url") or issue.get("html_url"),
-                "author": (
-                    issue.get("user", {}).get("login")
-                    if "user" in issue
-                    else issue.get("author", {}).get("login")
-                ),
-                "labels": labels,
-            }
-        }
-
-        # Add projects v2 info if available
-        if "projectCards" in issue and issue["projectCards"].get("nodes"):
-            metadata["github"]["projects"] = [
-                {
-                    "name": card["project"]["name"],
-                    "column": card["column"]["name"],
-                    "url": card["project"]["url"],
-                }
-                for card in issue["projectCards"]["nodes"]
-            ]
-
-        return Task(
-            id=str(issue["number"]),
-            title=issue["title"],
-            description=issue.get("body") or issue.get("bodyText"),
-            state=state,
-            priority=priority,
-            tags=labels,
-            parent_epic=parent_epic,
-            assignee=assignee,
-            created_at=created_at,
-            updated_at=updated_at,
-            metadata=metadata,
-        )
+        """Convert GitHub issue to universal Task (delegated to mappers module)."""
+        return map_github_issue_to_task(issue, self.custom_priority_scheme)
 
     async def _ensure_label_exists(
         self, label_name: str, color: str = "0366d6"
     ) -> None:
         """Ensure a label exists in the repository."""
-
+        cache_key = "github_labels"
+        cached_labels = await self._labels_cache.get(cache_key)
+
+        if cached_labels is None:
             response = await self.client.get(f"/repos/{self.owner}/{self.repo}/labels")
             response.raise_for_status()
-
+            cached_labels = response.json()
+            await self._labels_cache.set(cache_key, cached_labels)
 
         # Check if label exists
-        existing_labels = [label["name"].lower() for label in …
+        existing_labels = [label["name"].lower() for label in cached_labels]
         if label_name.lower() not in existing_labels:
             # Create the label
             response = await self.client.post(
@@ -485,7 +211,8 @@ class GitHubAdapter(BaseAdapter[Task]):
                 json={"name": label_name, "color": color},
             )
             if response.status_code == 201:
-
+                cached_labels.append(response.json())
+                await self._labels_cache.set(cache_key, cached_labels)
 
     async def _graphql_request(
         self, query: str, variables: dict[str, Any]
@@ -898,9 +625,7 @@ class GitHubAdapter(BaseAdapter[Task]):
         github_query = " ".join(search_parts)
 
         # Use GraphQL for better search capabilities
-        full_query = (
-            GitHubGraphQLQueries.ISSUE_FRAGMENT + GitHubGraphQLQueries.SEARCH_ISSUES
-        )
+        full_query = ISSUE_FRAGMENT + SEARCH_ISSUES
 
         variables = {
             "query": github_query,
@@ -1449,8 +1174,10 @@ Fixes #{issue_number}
             List of label dictionaries with 'id', 'name', and 'color' fields
 
         """
-
-
+        cache_key = "github_labels"
+        cached = await self._labels_cache.get(cache_key)
+        if cached is not None:
+            return cached
 
         response = await self.client.get(f"/repos/{self.owner}/{self.repo}/labels")
         response.raise_for_status()
@@ -1462,7 +1189,7 @@ Fixes #{issue_number}
             for label in labels
         ]
 
-        self._labels_cache …
+        await self._labels_cache.set(cache_key, standardized_labels)
         return standardized_labels
 
     async def update_milestone(
@@ -1769,7 +1496,7 @@ Fixes #{issue_number}
         )
 
         # Execute GraphQL query to fetch iterations
-        query = …
+        query = GET_PROJECT_ITERATIONS
         variables = {"projectId": project_id, "first": min(limit, 100), "after": None}
 
         try:
@@ -2079,6 +1806,1420 @@ Fixes #{issue_number}
|
|
|
2079
1806
|
except httpx.HTTPError as e:
|
|
2080
1807
|
raise ValueError(f"Failed to list project labels: {e}") from e
|
|
2081
1808
|
|
|
1809
|
+
# ========================================================================
|
|
1810
|
+
# New Milestone Methods (Phase 2 - GitHub Native Support)
|
|
1811
|
+
# ========================================================================
|
|
1812
|
+
|
|
1813
|
+
async def milestone_create(
|
|
1814
|
+
self,
|
|
1815
|
+
name: str,
|
|
1816
|
+
target_date: date | None = None,
|
|
1817
|
+
labels: list[str] | None = None,
|
|
1818
|
+
description: str = "",
|
|
1819
|
+
project_id: str | None = None,
|
|
1820
|
+
) -> Milestone:
|
|
1821
|
+
"""Create milestone using GitHub Milestones API.
|
|
1822
|
+
|
|
1823
|
+
GitHub milestones are repository-scoped and natively supported.
|
|
1824
|
+
|
|
1825
|
+
Args:
|
|
1826
|
+
----
|
|
1827
|
+
name: Milestone name/title
|
|
1828
|
+
target_date: Target completion date (optional)
|
|
1829
|
+
labels: Labels for local storage (GitHub doesn't store labels on milestones)
|
|
1830
|
+
description: Milestone description
|
|
1831
|
+
project_id: Project ID (ignored for GitHub, repo-scoped)
|
|
1832
|
+
|
|
1833
|
+
Returns:
|
|
1834
|
+
-------
|
|
1835
|
+
Created Milestone object
|
|
1836
|
+
|
|
1837
|
+
Raises:
|
|
1838
|
+
------
|
|
1839
|
+
ValueError: If repository is not configured
|
|
1840
|
+
httpx.HTTPError: If API request fails
|
|
1841
|
+
|
|
1842
|
+
"""
|
|
1843
|
+
from datetime import datetime as dt
|
|
1844
|
+
|
|
1845
|
+
if not self.repo:
|
|
1846
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
1847
|
+
|
|
1848
|
+
# GitHub API expects ISO 8601 datetime for due_on
|
|
1849
|
+
due_on = None
|
|
1850
|
+
if target_date:
|
|
1851
|
+
due_on = dt.combine(target_date, dt.min.time()).isoformat() + "Z"
|
|
1852
|
+
|
|
1853
|
+
milestone_data = {
|
|
1854
|
+
"title": name,
|
|
1855
|
+
"description": description,
|
|
1856
|
+
"state": "open",
|
|
1857
|
+
}
|
|
1858
|
+
|
|
1859
|
+
if due_on:
|
|
1860
|
+
milestone_data["due_on"] = due_on
|
|
1861
|
+
|
|
1862
|
+
# Create milestone via GitHub API
|
|
1863
|
+
response = await self.client.post(
|
|
1864
|
+
f"/repos/{self.owner}/{self.repo}/milestones",
|
|
1865
|
+
json=milestone_data,
|
|
1866
|
+
)
|
|
1867
|
+
response.raise_for_status()
|
|
1868
|
+
|
|
1869
|
+
gh_milestone = response.json()
|
|
1870
|
+
|
|
1871
|
+
# Convert to Milestone model
|
|
1872
|
+
milestone = self._github_milestone_to_milestone(gh_milestone, labels)
|
|
1873
|
+
|
|
1874
|
+
# Save to local storage for label tracking
|
|
1875
|
+
from pathlib import Path
|
|
1876
|
+
|
|
1877
|
+
from ...core.milestone_manager import MilestoneManager
|
|
1878
|
+
|
|
1879
|
+
config_dir = Path.home() / ".mcp-ticketer"
|
|
1880
|
+
manager = MilestoneManager(config_dir)
|
|
1881
|
+
manager.save_milestone(milestone)
|
|
1882
|
+
|
|
1883
|
+
logger.info(f"Created GitHub milestone: {milestone.id} ({milestone.name})")
|
|
1884
|
+
return milestone
|
|
1885
|
+
|
|
1886
|
+
async def milestone_get(self, milestone_id: str) -> Milestone | None:
|
|
1887
|
+
"""Get milestone by ID (milestone number in GitHub).
|
|
1888
|
+
|
|
1889
|
+
Args:
|
|
1890
|
+
----
|
|
1891
|
+
milestone_id: Milestone number as string
|
|
1892
|
+
|
|
1893
|
+
Returns:
|
|
1894
|
+
-------
|
|
1895
|
+
Milestone object or None if not found
|
|
1896
|
+
|
|
1897
|
+
Raises:
|
|
1898
|
+
------
|
|
1899
|
+
ValueError: If repository is not configured
|
|
1900
|
+
|
|
1901
|
+
"""
|
|
1902
|
+
from pathlib import Path
|
|
1903
|
+
|
|
1904
|
+
from ...core.milestone_manager import MilestoneManager
|
|
1905
|
+
|
|
1906
|
+
if not self.repo:
|
|
1907
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
1908
|
+
|
|
1909
|
+
try:
|
|
1910
|
+
# milestone_id is the milestone number in GitHub
|
|
1911
|
+
response = await self.client.get(
|
|
1912
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
|
|
1913
|
+
)
|
|
1914
|
+
|
|
1915
|
+
if response.status_code == 404:
|
|
1916
|
+
return None
|
|
1917
|
+
|
|
1918
|
+
response.raise_for_status()
|
|
1919
|
+
gh_milestone = response.json()
|
|
1920
|
+
|
|
1921
|
+
# Load labels from local storage
|
|
1922
|
+
config_dir = Path.home() / ".mcp-ticketer"
|
|
1923
|
+
manager = MilestoneManager(config_dir)
|
|
1924
|
+
local_milestone = manager.get_milestone(milestone_id)
|
|
1925
|
+
labels = local_milestone.labels if local_milestone else []
|
|
1926
|
+
|
|
1927
|
+
return self._github_milestone_to_milestone(gh_milestone, labels)
|
|
1928
|
+
|
|
1929
|
+
except httpx.HTTPError as e:
|
|
1930
|
+
logger.error(f"Failed to get milestone {milestone_id}: {e}")
|
|
1931
|
+
return None
|
|
1932
|
+
|
|
1933
|
+
async def milestone_list(
|
|
1934
|
+
self,
|
|
1935
|
+
project_id: str | None = None,
|
|
1936
|
+
state: str | None = None,
|
|
1937
|
+
) -> list[Milestone]:
|
|
1938
|
+
"""List milestones from GitHub repository.
|
|
1939
|
+
|
|
1940
|
+
Note: project_id is ignored for GitHub (repo-scoped).
|
|
1941
|
+
|
|
1942
|
+
Args:
|
|
1943
|
+
----
|
|
1944
|
+
project_id: Project ID (ignored, GitHub is repo-scoped)
|
|
1945
|
+
state: Filter by state (open, active, closed, completed)
|
|
1946
|
+
|
|
1947
|
+
Returns:
|
|
1948
|
+
-------
|
|
1949
|
+
List of Milestone objects
|
|
1950
|
+
|
|
1951
|
+
Raises:
|
|
1952
|
+
------
|
|
1953
|
+
ValueError: If repository is not configured
|
|
1954
|
+
|
|
1955
|
+
"""
|
|
1956
|
+
from pathlib import Path
|
|
1957
|
+
|
|
1958
|
+
from ...core.milestone_manager import MilestoneManager
|
|
1959
|
+
|
|
1960
|
+
if not self.repo:
|
|
1961
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
1962
|
+
|
|
1963
|
+
# Map our states to GitHub states
|
|
1964
|
+
github_state = "all"
|
|
1965
|
+
if state in ["open", "active"]:
|
|
1966
|
+
github_state = "open"
|
|
1967
|
+
elif state in ["completed", "closed"]:
|
|
1968
|
+
github_state = "closed"
|
|
1969
|
+
|
|
1970
|
+
params = {
|
|
1971
|
+
"state": github_state,
|
|
1972
|
+
"sort": "due_on",
|
|
1973
|
+
"direction": "asc",
|
|
1974
|
+
"per_page": 100,
|
|
1975
|
+
}
|
|
1976
|
+
|
|
1977
|
+
response = await self.client.get(
|
|
1978
|
+
f"/repos/{self.owner}/{self.repo}/milestones",
|
|
1979
|
+
params=params,
|
|
1980
|
+
)
|
|
1981
|
+
response.raise_for_status()
|
|
1982
|
+
|
|
1983
|
+
# Load labels from local storage
|
|
1984
|
+
config_dir = Path.home() / ".mcp-ticketer"
|
|
1985
|
+
manager = MilestoneManager(config_dir)
|
|
1986
|
+
|
|
1987
|
+
milestones = []
|
|
1988
|
+
for gh_milestone in response.json():
|
|
1989
|
+
milestone_id = str(gh_milestone["number"])
|
|
1990
|
+
local_milestone = manager.get_milestone(milestone_id)
|
|
1991
|
+
labels = local_milestone.labels if local_milestone else []
|
|
1992
|
+
|
|
1993
|
+
milestone = self._github_milestone_to_milestone(gh_milestone, labels)
|
|
1994
|
+
milestones.append(milestone)
|
|
1995
|
+
|
|
1996
|
+
logger.info(
|
|
1997
|
+
f"Listed {len(milestones)} GitHub milestones (state={github_state})"
|
|
1998
|
+
)
|
|
1999
|
+
return milestones
|
|
2000
|
+
|
|
2001
|
+
async def milestone_update(
|
|
2002
|
+
self,
|
|
2003
|
+
milestone_id: str,
|
|
2004
|
+
name: str | None = None,
|
|
2005
|
+
target_date: date | None = None,
|
|
2006
|
+
state: str | None = None,
|
|
2007
|
+
labels: list[str] | None = None,
|
|
2008
|
+
description: str | None = None,
|
|
2009
|
+
) -> Milestone | None:
|
|
2010
|
+
"""Update milestone properties.
|
|
2011
|
+
|
|
2012
|
+
Args:
|
|
2013
|
+
----
|
|
2014
|
+
milestone_id: Milestone number as string
|
|
2015
|
+
name: New milestone name
|
|
2016
|
+
target_date: New target date
|
|
2017
|
+
state: New state (open, closed)
|
|
2018
|
+
labels: New labels (stored locally)
|
|
2019
|
+
description: New description
|
|
2020
|
+
|
|
2021
|
+
Returns:
|
|
2022
|
+
-------
|
|
2023
|
+
Updated Milestone object or None if not found
|
|
2024
|
+
|
|
2025
|
+
Raises:
|
|
2026
|
+
------
|
|
2027
|
+
ValueError: If repository is not configured
|
|
2028
|
+
|
|
2029
|
+
"""
|
|
2030
|
+
from datetime import datetime as dt
|
|
2031
|
+
from pathlib import Path
|
|
2032
|
+
|
|
2033
|
+
from ...core.milestone_manager import MilestoneManager
|
|
2034
|
+
|
|
2035
|
+
if not self.repo:
|
|
2036
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
2037
|
+
|
|
2038
|
+
update_data = {}
|
|
2039
|
+
|
|
2040
|
+
if name:
|
|
2041
|
+
update_data["title"] = name
|
|
2042
|
+
if description is not None:
|
|
2043
|
+
update_data["description"] = description
|
|
2044
|
+
if target_date:
|
|
2045
|
+
due_on = dt.combine(target_date, dt.min.time()).isoformat() + "Z"
|
|
2046
|
+
update_data["due_on"] = due_on
|
|
2047
|
+
if state:
|
|
2048
|
+
# Map our states to GitHub states
|
|
2049
|
+
if state in ["completed", "closed"]:
|
|
2050
|
+
update_data["state"] = "closed"
|
|
2051
|
+
elif state in ["open", "active"]:
|
|
2052
|
+
update_data["state"] = "open"
|
|
2053
|
+
|
|
2054
|
+
if not update_data and labels is None:
|
|
2055
|
+
raise ValueError("At least one field must be updated")
|
|
2056
|
+
|
|
2057
|
+
# Update milestone via GitHub API
|
|
2058
|
+
if update_data:
|
|
2059
|
+
response = await self.client.patch(
|
|
2060
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}",
|
|
2061
|
+
json=update_data,
|
|
2062
|
+
)
|
|
2063
|
+
response.raise_for_status()
|
|
2064
|
+
gh_milestone = response.json()
|
|
2065
|
+
else:
|
|
2066
|
+
# Only labels updated, fetch current milestone
|
|
2067
|
+
response = await self.client.get(
|
|
2068
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
|
|
2069
|
+
)
|
|
2070
|
+
response.raise_for_status()
|
|
2071
|
+
gh_milestone = response.json()
|
|
2072
|
+
|
|
2073
|
+
# Update labels in local storage
|
|
2074
|
+
config_dir = Path.home() / ".mcp-ticketer"
|
|
2075
|
+
manager = MilestoneManager(config_dir)
|
|
2076
|
+
|
|
2077
|
+
if labels is not None:
|
|
2078
|
+
milestone = self._github_milestone_to_milestone(gh_milestone, labels)
|
|
2079
|
+
manager.save_milestone(milestone)
|
|
2080
|
+
logger.info(f"Updated GitHub milestone: {milestone_id} (including labels)")
|
|
2081
|
+
return milestone
|
|
2082
|
+
|
|
2083
|
+
# Load existing labels
|
|
2084
|
+
local_milestone = manager.get_milestone(milestone_id)
|
|
2085
|
+
existing_labels = local_milestone.labels if local_milestone else []
|
|
2086
|
+
|
|
2087
|
+
milestone = self._github_milestone_to_milestone(gh_milestone, existing_labels)
|
|
2088
|
+
logger.info(f"Updated GitHub milestone: {milestone_id}")
|
|
2089
|
+
return milestone
|
|
2090
|
+
|
|
2091
|
+
async def milestone_delete(self, milestone_id: str) -> bool:
|
|
2092
|
+
"""Delete milestone from GitHub repository.
|
|
2093
|
+
|
|
2094
|
+
Args:
|
|
2095
|
+
----
|
|
2096
|
+
milestone_id: Milestone number as string
|
|
2097
|
+
|
|
2098
|
+
Returns:
|
|
2099
|
+
-------
|
|
2100
|
+
True if deleted, False if not found
|
|
2101
|
+
|
|
2102
|
+
Raises:
|
|
2103
|
+
------
|
|
2104
|
+
ValueError: If repository is not configured
|
|
2105
|
+
|
|
2106
|
+
"""
|
|
2107
|
+
from pathlib import Path
|
|
2108
|
+
|
|
2109
|
+
from ...core.milestone_manager import MilestoneManager
|
|
2110
|
+
|
|
2111
|
+
if not self.repo:
|
|
2112
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
2113
|
+
|
|
2114
|
+
try:
|
|
2115
|
+
response = await self.client.delete(
|
|
2116
|
+
f"/repos/{self.owner}/{self.repo}/milestones/{milestone_id}"
|
|
2117
|
+
)
|
|
2118
|
+
|
|
2119
|
+
# GitHub returns 204 No Content on successful deletion
|
|
2120
|
+
if response.status_code == 204:
|
|
2121
|
+
# Remove from local storage
|
|
2122
|
+
config_dir = Path.home() / ".mcp-ticketer"
|
|
2123
|
+
manager = MilestoneManager(config_dir)
|
|
2124
|
+
manager.delete_milestone(milestone_id)
|
|
2125
|
+
|
|
2126
|
+
logger.info(f"Deleted GitHub milestone: {milestone_id}")
|
|
2127
|
+
return True
|
|
2128
|
+
|
|
2129
|
+
# Handle 404 errors gracefully
|
|
2130
|
+
if response.status_code == 404:
|
|
2131
|
+
logger.warning(f"Milestone {milestone_id} not found for deletion")
|
|
2132
|
+
return False
|
|
2133
|
+
|
|
2134
|
+
response.raise_for_status()
|
|
2135
|
+
return True
|
|
2136
|
+
|
|
2137
|
+
except httpx.HTTPError as e:
|
|
2138
|
+
logger.error(f"Failed to delete milestone {milestone_id}: {e}")
|
|
2139
|
+
return False
|
|
2140
|
+
|
|
2141
|
+
async def milestone_get_issues(
|
|
2142
|
+
self,
|
|
2143
|
+
milestone_id: str,
|
|
2144
|
+
state: str | None = None,
|
|
2145
|
+
) -> list[dict[str, Any]]:
|
|
2146
|
+
"""Get issues in milestone.
|
|
2147
|
+
|
|
2148
|
+
Args:
|
|
2149
|
+
----
|
|
2150
|
+
milestone_id: Milestone number as string
|
|
2151
|
+
state: Filter by state (open, closed, all)
|
|
2152
|
+
|
|
2153
|
+
Returns:
|
|
2154
|
+
-------
|
|
2155
|
+
List of issue dictionaries
|
|
2156
|
+
|
|
2157
|
+
Raises:
|
|
2158
|
+
------
|
|
2159
|
+
ValueError: If repository is not configured
|
|
2160
|
+
|
|
2161
|
+
"""
|
|
2162
|
+
if not self.repo:
|
|
2163
|
+
raise ValueError("Repository required for GitHub milestone operations")
|
|
2164
|
+
|
|
2165
|
+
params = {
|
|
2166
|
+
"milestone": milestone_id,
|
|
2167
|
+
"state": state or "all",
|
|
2168
|
+
"per_page": 100,
|
|
2169
|
+
}
|
|
2170
|
+
|
|
2171
|
+
response = await self.client.get(
|
|
2172
|
+
f"/repos/{self.owner}/{self.repo}/issues",
|
|
2173
|
+
params=params,
|
|
2174
|
+
)
|
|
2175
|
+
response.raise_for_status()
|
|
2176
|
+
|
|
2177
|
+
# Convert GitHub issues to our format
|
|
2178
|
+
issues = []
|
|
2179
|
+
for gh_issue in response.json():
|
|
2180
|
+
# Skip pull requests (GitHub includes them in issues endpoint)
|
|
2181
|
+
if "pull_request" in gh_issue:
|
|
2182
|
+
continue
|
|
2183
|
+
|
|
2184
|
+
issues.append(
|
|
2185
|
+
{
|
|
2186
|
+
"id": str(gh_issue["number"]),
|
|
2187
|
+
"identifier": f"#{gh_issue['number']}",
|
|
2188
|
+
"title": gh_issue["title"],
|
|
2189
|
+
"state": gh_issue["state"],
|
|
2190
|
+
"labels": [label["name"] for label in gh_issue.get("labels", [])],
|
|
2191
|
+
"created_at": gh_issue["created_at"],
|
|
2192
|
+
"updated_at": gh_issue["updated_at"],
|
|
2193
|
+
}
|
|
2194
|
+
)
|
|
2195
|
+
|
|
2196
|
+
logger.info(f"Retrieved {len(issues)} issues from milestone {milestone_id}")
|
|
2197
|
+
return issues
|
|
2198
|
+
|
|
2199
|
+
def _github_milestone_to_milestone(
|
|
2200
|
+
self,
|
|
2201
|
+
gh_milestone: dict[str, Any],
|
|
2202
|
+
labels: list[str] | None = None,
|
|
2203
|
+
) -> Milestone:
|
|
2204
|
+
"""Convert GitHub Milestone to universal Milestone model (delegated to mappers module)."""
|
|
2205
|
+
return map_github_milestone_to_milestone(gh_milestone, self.repo, labels)
|
|
2206
|
+
|
|
2207
|
+
# =============================================================================
|
|
2208
|
+
# GitHub Projects V2 Operations (Week 2: Core CRUD)
|
|
2209
|
+
# =============================================================================
|
|
2210
|
+
|
|
2211
|
+
async def project_list(
|
|
2212
|
+
self,
|
|
2213
|
+
owner: str | None = None,
|
|
2214
|
+
scope: ProjectScope = ProjectScope.ORGANIZATION,
|
|
2215
|
+
state: ProjectState | None = None,
|
|
2216
|
+
limit: int = 10,
|
|
2217
|
+
cursor: str | None = None,
|
|
2218
|
+
) -> list[Project]:
|
|
2219
|
+
"""List projects for an organization or user.
|
|
2220
|
+
|
|
2221
|
+
Args:
|
|
2222
|
+
----
|
|
2223
|
+
owner: Organization or user login (defaults to configured owner)
|
|
2224
|
+
scope: Project scope (ORGANIZATION or USER)
|
|
2225
|
+
state: Filter by project state (ACTIVE, COMPLETED, ARCHIVED)
|
|
2226
|
+
limit: Maximum number of projects to return (default: 10)
|
|
2227
|
+
cursor: Pagination cursor for next page
|
|
2228
|
+
|
|
2229
|
+
Returns:
|
|
2230
|
+
-------
|
|
2231
|
+
List of Project objects
|
|
2232
|
+
|
|
2233
|
+
Raises:
|
|
2234
|
+
------
|
|
2235
|
+
ValueError: If owner not provided and not configured
|
|
2236
|
+
RuntimeError: If GraphQL query fails
|
|
2237
|
+
|
|
2238
|
+
Example:
|
|
2239
|
+
-------
|
|
2240
|
+
projects = await adapter.project_list(owner="myorg", limit=20)
|
|
2241
|
+
|
|
2242
|
+
"""
|
|
2243
|
+
# Validate owner (use self.owner if not provided)
|
|
2244
|
+
owner = owner or self.owner
|
|
2245
|
+
if not owner:
|
|
2246
|
+
raise ValueError("Owner required for GitHub project operations")
|
|
2247
|
+
|
|
2248
|
+
# Build GraphQL variables
|
|
2249
|
+
variables = {
|
|
2250
|
+
"owner": owner,
|
|
2251
|
+
"first": limit,
|
|
2252
|
+
"after": cursor,
|
|
2253
|
+
}
|
|
2254
|
+
|
|
2255
|
+
try:
|
|
2256
|
+
# Execute LIST_PROJECTS_QUERY
|
|
2257
|
+
data = await self.gh_client.execute_graphql(
|
|
2258
|
+
query=LIST_PROJECTS_QUERY,
|
|
2259
|
+
variables=variables,
|
|
2260
|
+
)
|
|
2261
|
+
|
|
2262
|
+
# Parse response and extract projects array
|
|
2263
|
+
org_data = data.get("organization")
|
|
2264
|
+
if not org_data:
|
|
2265
|
+
logger.warning(f"Organization {owner} not found")
|
|
2266
|
+
return []
|
|
2267
|
+
|
|
2268
|
+
projects_data = org_data.get("projectsV2", {})
|
|
2269
|
+
project_nodes = projects_data.get("nodes", [])
|
|
2270
|
+
|
|
2271
|
+
# Map each project using mapper
|
|
2272
|
+
projects = []
|
|
2273
|
+
for project_data in project_nodes:
|
|
2274
|
+
project = map_github_projectv2_to_project(project_data, owner)
|
|
2275
|
+
|
|
2276
|
+
# Filter by state if provided (post-query filtering)
|
|
2277
|
+
if state is None or project.state == state:
|
|
2278
|
+
projects.append(project)
|
|
2279
|
+
|
|
2280
|
+
logger.info(f"Retrieved {len(projects)} projects for {owner}")
|
|
2281
|
+
return projects
|
|
2282
|
+
|
|
2283
|
+
except Exception as e:
|
|
2284
|
+
logger.error(f"Failed to list projects for {owner}: {e}")
|
|
2285
|
+
raise RuntimeError(f"Failed to list projects: {e}") from e
|
|
2286
|
+
|
|
2287
|
+
async def project_get(
|
|
2288
|
+
self,
|
|
2289
|
+
project_id: str,
|
|
2290
|
+
owner: str | None = None,
|
|
2291
|
+
) -> Project | None:
|
|
2292
|
+
"""Get a single project by ID or number.
|
|
2293
|
+
|
|
2294
|
+
Automatically detects ID format:
|
|
2295
|
+
- Node ID format: "PVT_kwDOABCD..." (starts with PVT_)
|
|
2296
|
+
- Number format: "123" (numeric string)
|
|
2297
|
+
|
|
2298
|
+
Args:
|
|
2299
|
+
----
|
|
2300
|
+
project_id: Project node ID or number
|
|
2301
|
+
owner: Organization or user login (defaults to configured owner)
|
|
2302
|
+
|
|
2303
|
+
Returns:
|
|
2304
|
+
-------
|
|
2305
|
+
Project object if found, None otherwise
|
|
2306
|
+
|
|
2307
|
+
Raises:
|
|
2308
|
+
------
|
|
2309
|
+
ValueError: If owner not provided for number-based lookup
|
|
2310
|
+
RuntimeError: If GraphQL query fails
|
|
2311
|
+
|
|
2312
|
+
Example:
|
|
2313
|
+
-------
|
|
2314
|
+
# By number
|
|
2315
|
+
project = await adapter.project_get("42", owner="myorg")
|
|
2316
|
+
|
|
2317
|
+
# By node ID
|
|
2318
|
+
project = await adapter.project_get("PVT_kwDOABCD1234")
|
|
2319
|
+
|
|
2320
|
+
"""
|
|
2321
|
+
try:
|
|
2322
|
+
# Auto-detect ID format
|
|
2323
|
+
if project_id.startswith("PVT_"):
|
|
2324
|
+
# Use GET_PROJECT_BY_ID_QUERY for node IDs
|
|
2325
|
+
data = await self.gh_client.execute_graphql(
|
|
2326
|
+
query=GET_PROJECT_BY_ID_QUERY,
|
|
2327
|
+
variables={"projectId": project_id},
|
|
2328
|
+
)
|
|
2329
|
+
|
|
2330
|
+
project_data = data.get("node")
|
|
2331
|
+
if not project_data:
|
|
2332
|
+
logger.warning(f"Project {project_id} not found")
|
|
2333
|
+
return None
|
|
2334
|
+
|
|
2335
|
+
# Extract owner from project data
|
|
2336
|
+
owner_data = project_data.get("owner", {})
|
|
2337
|
+
owner_login = owner_data.get("login", owner or self.owner)
|
|
2338
|
+
|
|
2339
|
+
project = map_github_projectv2_to_project(project_data, owner_login)
|
|
2340
|
+
logger.info(f"Retrieved project {project_id} by node ID")
|
|
2341
|
+
return project
|
|
2342
|
+
|
|
2343
|
+
else:
|
|
2344
|
+
# Numeric ID - requires owner
|
|
2345
|
+
owner = owner or self.owner
|
|
2346
|
+
if not owner:
|
|
2347
|
+
raise ValueError("Owner required for number-based project lookup")
|
|
2348
|
+
|
|
2349
|
+
# Convert to integer
|
|
2350
|
+
try:
|
|
2351
|
+
project_number = int(project_id)
|
|
2352
|
+
except ValueError as e:
|
|
2353
|
+
raise ValueError(f"Invalid project ID format: {project_id}") from e
|
|
2354
|
+
|
|
2355
|
+
# Use GET_PROJECT_QUERY for number-based lookup
|
|
2356
|
+
data = await self.gh_client.execute_graphql(
|
|
2357
|
+
query=GET_PROJECT_QUERY,
|
|
2358
|
+
variables={"owner": owner, "number": project_number},
|
|
2359
|
+
)
|
|
2360
|
+
|
|
2361
|
+
org_data = data.get("organization")
|
|
2362
|
+
if not org_data:
|
|
2363
|
+
logger.warning(f"Organization {owner} not found")
|
|
2364
|
+
return None
|
|
2365
|
+
|
|
2366
|
+
project_data = org_data.get("projectV2")
|
|
2367
|
+
if not project_data:
|
|
2368
|
+
logger.warning(f"Project {project_id} not found for {owner}")
|
|
2369
|
+
return None
|
|
2370
|
+
|
|
2371
|
+
project = map_github_projectv2_to_project(project_data, owner)
|
|
2372
|
+
logger.info(f"Retrieved project {project_id} by number")
|
|
2373
|
+
return project
|
|
2374
|
+
|
|
2375
|
+
except Exception as e:
|
|
2376
|
+
logger.error(f"Failed to get project {project_id}: {e}")
|
|
2377
|
+
raise RuntimeError(f"Failed to get project: {e}") from e
|
|
2378
|
+
|
|
2379
|
+
async def project_create(
|
|
2380
|
+
self,
|
|
2381
|
+
title: str,
|
|
2382
|
+
description: str | None = None,
|
|
2383
|
+
owner: str | None = None,
|
|
2384
|
+
scope: ProjectScope = ProjectScope.ORGANIZATION,
|
|
2385
|
+
) -> Project:
|
|
2386
|
+
"""Create a new GitHub Projects V2 project.
|
|
2387
|
+
|
|
2388
|
+
Args:
|
|
2389
|
+
----
|
|
2390
|
+
title: Project title (required)
|
|
2391
|
+
description: Project description (optional)
|
|
2392
|
+
owner: Organization or user login (defaults to configured owner)
|
|
2393
|
+
scope: Project scope (ORGANIZATION or USER)
|
|
2394
|
+
|
|
2395
|
+
Returns:
|
|
2396
|
+
-------
|
|
2397
|
+
Newly created Project object
|
|
2398
|
+
|
|
2399
|
+
Raises:
|
|
2400
|
+
------
|
|
2401
|
+
ValueError: If owner not provided
|
|
2402
|
+
RuntimeError: If creation fails (permissions, etc.)
|
|
2403
|
+
|
|
2404
|
+
Example:
|
|
2405
|
+
-------
|
|
2406
|
+
project = await adapter.project_create(
|
|
2407
|
+
title="Q4 Features",
|
|
2408
|
+
description="New features for Q4 2025",
|
|
2409
|
+
owner="myorg"
|
|
2410
|
+
)
|
|
2411
|
+
|
|
2412
|
+
"""
|
|
2413
|
+
# Validate owner
|
|
2414
|
+
owner = owner or self.owner
|
|
2415
|
+
if not owner:
|
|
2416
|
+
raise ValueError("Owner required for GitHub project creation")
|
|
2417
|
+
|
|
2418
|
+
try:
|
|
2419
|
+
# Get owner node ID (query organization)
|
|
2420
|
+
# We need to fetch the organization/user to get its node ID
|
|
2421
|
+
org_query = """
|
|
2422
|
+
query GetOrgId($login: String!) {
|
|
2423
|
+
organization(login: $login) {
|
|
2424
|
+
id
|
|
2425
|
+
}
|
|
2426
|
+
}
|
|
2427
|
+
"""
|
|
2428
|
+
|
|
2429
|
+
org_data = await self.gh_client.execute_graphql(
|
|
2430
|
+
query=org_query,
|
|
2431
|
+
variables={"login": owner},
|
|
2432
|
+
)
|
|
2433
|
+
|
|
2434
|
+
org = org_data.get("organization")
|
|
2435
|
+
if not org:
|
|
2436
|
+
raise ValueError(f"Organization {owner} not found")
|
|
2437
|
+
|
|
2438
|
+
owner_id = org.get("id")
|
|
2439
|
+
|
|
2440
|
+
# Execute CREATE_PROJECT_MUTATION
|
|
2441
|
+
data = await self.gh_client.execute_graphql(
|
|
2442
|
+
query=CREATE_PROJECT_MUTATION,
|
|
2443
|
+
variables={
|
|
2444
|
+
"ownerId": owner_id,
|
|
2445
|
+
"title": title,
|
|
2446
|
+
},
|
|
2447
|
+
)
|
|
2448
|
+
|
|
2449
|
+
# Parse response and extract created project
|
|
2450
|
+
create_result = data.get("createProjectV2", {})
|
|
2451
|
+
project_data = create_result.get("projectV2")
|
|
2452
|
+
|
|
2453
|
+
if not project_data:
|
|
2454
|
+
raise RuntimeError("Project creation returned no data")
|
|
2455
|
+
|
|
2456
|
+
# Map using mapper
|
|
2457
|
+
project = map_github_projectv2_to_project(project_data, owner)
|
|
2458
|
+
|
|
2459
|
+
# Update description if provided (requires separate mutation)
|
|
2460
|
+
if description:
|
|
2461
|
+
await self.project_update(
|
|
2462
|
+
project_id=project.id,
|
|
2463
|
+
description=description,
|
|
2464
|
+
)
|
|
2465
|
+
|
|
2466
|
+
logger.info(f"Created project: {project.id} ({title})")
|
|
2467
|
+
return project
|
|
2468
|
+
|
|
2469
|
+
except Exception as e:
|
|
2470
|
+
logger.error(f"Failed to create project '{title}': {e}")
|
|
2471
|
+
raise RuntimeError(f"Failed to create project: {e}") from e
|
|
2472
|
+
|
|
2473
|
+
async def project_update(
|
|
2474
|
+
self,
|
|
2475
|
+
project_id: str,
|
|
2476
|
+
title: str | None = None,
|
|
2477
|
+
description: str | None = None,
|
|
2478
|
+
readme: str | None = None,
|
|
2479
|
+
state: ProjectState | None = None,
|
|
2480
|
+
) -> Project | None:
|
|
2481
|
+
"""Update project metadata.
|
|
2482
|
+
|
|
2483
|
+
Supports partial updates - only provided fields are updated.
|
|
2484
|
+
|
|
2485
|
+
Args:
|
|
2486
|
+
----
|
|
2487
|
+
project_id: Project node ID (PVT_...)
|
|
2488
|
+
title: New project title (optional)
|
|
2489
|
+
description: New project description (optional)
|
|
2490
|
+
readme: New project readme (optional)
|
|
2491
|
+
state: New project state (optional)
|
|
2492
|
+
|
|
2493
|
+
Returns:
|
|
2494
|
+
-------
|
|
2495
|
+
Updated Project object
|
|
2496
|
+
|
|
2497
|
+
Raises:
|
|
2498
|
+
------
|
|
2499
|
+
ValueError: If project_id invalid or no fields to update
|
|
2500
|
+
RuntimeError: If update fails
|
|
2501
|
+
|
|
2502
|
+
Example:
|
|
2503
|
+
-------
|
|
2504
|
+
project = await adapter.project_update(
|
|
2505
|
+
project_id="PVT_kwDOABCD1234",
|
|
2506
|
+
title="Updated Title",
|
|
2507
|
+
state=ProjectState.COMPLETED
|
|
2508
|
+
)
|
|
2509
|
+
|
|
2510
|
+
"""
|
|
2511
|
+
# Validate at least one field is provided
|
|
2512
|
+
if not any([title, description, readme, state]):
|
|
2513
|
+
raise ValueError("At least one field must be provided for update")
|
|
2514
|
+
|
|
2515
|
+
try:
|
|
2516
|
+
# Build mutation variables (only include provided fields)
|
|
2517
|
+
variables: dict[str, Any] = {"projectId": project_id}
|
|
2518
|
+
|
|
2519
|
+
if title is not None:
|
|
2520
|
+
variables["title"] = title
|
|
2521
|
+
|
|
2522
|
+
if description is not None:
|
|
2523
|
+
variables["shortDescription"] = description
|
|
2524
|
+
|
|
2525
|
+
if readme is not None:
|
|
2526
|
+
variables["readme"] = readme
|
|
2527
|
+
|
|
2528
|
+
# Convert ProjectState to GitHub boolean
|
|
2529
|
+
if state is not None:
|
|
2530
|
+
# GitHub only has open/closed via the 'closed' boolean
|
|
2531
|
+
if state in (ProjectState.COMPLETED, ProjectState.ARCHIVED):
|
|
2532
|
+
variables["closed"] = True
|
|
2533
|
+
elif state == ProjectState.ACTIVE:
|
|
2534
|
+
variables["closed"] = False
|
|
2535
|
+
# PLANNED and CANCELLED don't have direct mappings
|
|
2536
|
+
# We'll keep the project open for PLANNED
|
|
2537
|
+
|
|
2538
|
+
# Execute UPDATE_PROJECT_MUTATION
|
|
2539
|
+
data = await self.gh_client.execute_graphql(
|
|
2540
|
+
query=UPDATE_PROJECT_MUTATION,
|
|
2541
|
+
variables=variables,
|
|
2542
|
+
)
|
|
2543
|
+
|
|
2544
|
+
# Parse response
|
|
2545
|
+
update_result = data.get("updateProjectV2", {})
|
|
2546
|
+
project_data = update_result.get("projectV2")
|
|
2547
|
+
|
|
2548
|
+
if not project_data:
|
|
2549
|
+
logger.warning(f"Project {project_id} not found for update")
|
|
2550
|
+
return None
|
|
2551
|
+
|
|
2552
|
+
# Extract owner from project data
|
|
2553
|
+
owner_data = project_data.get("owner", {})
|
|
2554
|
+
owner = owner_data.get("login", self.owner)
|
|
2555
|
+
|
|
2556
|
+
# Map using mapper
|
|
2557
|
+
project = map_github_projectv2_to_project(project_data, owner)
|
|
2558
|
+
|
|
2559
|
+
logger.info(f"Updated project: {project_id}")
|
|
2560
|
+
return project
|
|
2561
|
+
|
|
2562
|
+
except Exception as e:
|
|
2563
|
+
logger.error(f"Failed to update project {project_id}: {e}")
|
|
2564
|
+
raise RuntimeError(f"Failed to update project: {e}") from e
|
|
2565
|
+
|
|
2566
|
+
async def project_delete(
|
|
2567
|
+
self,
|
|
2568
|
+
project_id: str,
|
|
2569
|
+
hard_delete: bool = False,
|
|
2570
|
+
) -> bool:
|
|
2571
|
+
"""Delete a project.
|
|
2572
|
+
|
|
2573
|
+
By default performs soft delete (closes project).
|
|
2574
|
+
Set hard_delete=True to permanently delete.
|
|
2575
|
+
|
|
2576
|
+
Args:
|
|
2577
|
+
----
|
|
2578
|
+
project_id: Project node ID (PVT_...)
|
|
2579
|
+
hard_delete: If True, permanently delete; if False, soft delete (close)
|
|
2580
|
+
|
|
2581
|
+
Returns:
|
|
2582
|
+
-------
|
|
2583
|
+
True if successful, False otherwise
|
|
2584
|
+
|
|
2585
|
+
Raises:
|
|
2586
|
+
------
|
|
2587
|
+
RuntimeError: If deletion fails
|
|
2588
|
+
|
|
2589
|
+
Example:
|
|
2590
|
+
-------
|
|
2591
|
+
# Soft delete (close)
|
|
2592
|
+
await adapter.project_delete("PVT_kwDOABCD1234")
|
|
2593
|
+
|
|
2594
|
+
# Hard delete (permanent)
|
|
2595
|
+
await adapter.project_delete("PVT_kwDOABCD1234", hard_delete=True)
|
|
2596
|
+
|
|
2597
|
+
"""
|
|
2598
|
+
try:
|
|
2599
|
+
if hard_delete:
|
|
2600
|
+
# Hard delete using DELETE_PROJECT_MUTATION
|
|
2601
|
+
logger.warning(f"Permanently deleting project {project_id}")
|
|
2602
|
+
|
|
2603
|
+
data = await self.gh_client.execute_graphql(
|
|
2604
|
+
query=DELETE_PROJECT_MUTATION,
|
|
2605
|
+
variables={"projectId": project_id},
|
|
2606
|
+
)
|
|
2607
|
+
|
|
2608
|
+
delete_result = data.get("deleteProjectV2", {})
|
|
2609
|
+
deleted_project = delete_result.get("projectV2")
|
|
2610
|
+
|
|
2611
|
+
if deleted_project:
|
|
2612
|
+
logger.info(f"Permanently deleted project: {project_id}")
|
|
2613
|
+
return True
|
|
2614
|
+
else:
|
|
2615
|
+
logger.warning(f"Failed to delete project {project_id}")
|
|
2616
|
+
return False
|
|
2617
|
+
|
|
2618
|
+
else:
|
|
2619
|
+
# Soft delete by setting public=false and closed=true
|
|
2620
|
+
data = await self.gh_client.execute_graphql(
|
|
2621
|
+
query=UPDATE_PROJECT_MUTATION,
|
|
2622
|
+
variables={
|
|
2623
|
+
"projectId": project_id,
|
|
2624
|
+
"public": False,
|
|
2625
|
+
"closed": True,
|
|
2626
|
+
},
|
|
2627
|
+
)
|
|
2628
|
+
|
|
2629
|
+
update_result = data.get("updateProjectV2", {})
|
|
2630
|
+
updated_project = update_result.get("projectV2")
|
|
2631
|
+
|
|
2632
|
+
if updated_project:
|
|
2633
|
+
logger.info(f"Soft deleted (closed) project: {project_id}")
|
|
2634
|
+
return True
|
|
2635
|
+
else:
|
|
2636
|
+
logger.warning(f"Failed to close project {project_id}")
|
|
2637
|
+
return False
|
|
2638
|
+
|
|
2639
|
+
except Exception as e:
|
|
2640
|
+
logger.error(f"Failed to delete project {project_id}: {e}")
|
|
2641
|
+
raise RuntimeError(f"Failed to delete project: {e}") from e
|
|
2642
|
+
2643 +     async def invalidate_label_cache(self) -> None:
2644 +         """Manually invalidate the label cache.
2645 +
2646 +         Useful when labels are modified externally or when you need
2647 +         to force a refresh of cached label data.
2648 +         """
2649 +         await self._labels_cache.clear()
2650 +
2651 +     # =============================================================================
2652 +     # GitHub Projects V2 Issue Operations (Week 3)
2653 +     # =============================================================================
2654 +
2655 +     async def project_add_issue(
2656 +         self,
2657 +         project_id: str,
2658 +         issue_id: str,
2659 +     ) -> bool:
2660 +         """Add an issue to a GitHub Projects V2 project.
2661 +
2662 +         Args:
2663 +         ----
2664 +             project_id: Project node ID (PVT_kwDOABCD...)
2665 +             issue_id: Issue node ID (I_kwDOABCD...) or issue number with owner/repo
2666 +
2667 +         Returns:
2668 +         -------
2669 +             True if issue was added successfully
2670 +
2671 +         Raises:
2672 +         ------
2673 +             ValueError: If project_id or issue_id is invalid
2674 +             RuntimeError: If GraphQL mutation fails
2675 +
2676 +         Example:
2677 +         -------
2678 +         # Add by issue node ID
2679 +         success = await adapter.project_add_issue(
2680 +             project_id="PVT_kwDOABCD1234",
2681 +             issue_id="I_kwDOABCD5678"
2682 +         )
2683 +
2684 +         # Add by issue number (requires owner/repo context)
2685 +         success = await adapter.project_add_issue(
2686 +             project_id="PVT_kwDOABCD1234",
2687 +             issue_id="owner/repo#123"
2688 +         )
2689 +
2690 +         Note:
2691 +         ----
2692 +         GitHub's addProjectV2ItemById mutation requires:
2693 +         - projectId: Project node ID
2694 +         - contentId: Issue/PR node ID (not item ID)
2695 +
2696 +         """
2697 +         # Validate project_id format
2698 +         if not project_id or not project_id.startswith("PVT_"):
2699 +             raise ValueError(
2700 +                 f"Invalid project_id: {project_id}. "
2701 +                 "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
2702 +             )
2703 +
2704 +         # Validate issue_id is provided
2705 +         if not issue_id:
2706 +             raise ValueError("issue_id is required")
2707 +
2708 +         # If issue_id is in "owner/repo#number" format, resolve to node ID
2709 +         content_id = issue_id
2710 +         if "#" in issue_id and "/" in issue_id:
2711 +             # Parse owner/repo#number format
2712 +             try:
2713 +                 repo_part, number_str = issue_id.rsplit("#", 1)
2714 +                 owner, repo = repo_part.split("/")
2715 +                 issue_number = int(number_str)
2716 +
2717 +                 # Query GitHub to get issue node ID
2718 +                 issue_query = """
2719 +                 query GetIssueNodeId($owner: String!, $repo: String!, $number: Int!) {
2720 +                     repository(owner: $owner, name: $repo) {
2721 +                         issue(number: $number) {
2722 +                             id
2723 +                         }
2724 +                     }
2725 +                 }
2726 +                 """
2727 +
2728 +                 result = await self._graphql_request(
2729 +                     issue_query,
2730 +                     {"owner": owner, "repo": repo, "number": issue_number},
2731 +                 )
2732 +
2733 +                 repo_data = result.get("repository")
2734 +                 if not repo_data:
2735 +                     raise ValueError(f"Repository {owner}/{repo} not found")
2736 +
2737 +                 issue_data = repo_data.get("issue")
2738 +                 if not issue_data:
2739 +                     raise ValueError(
2740 +                         f"Issue #{issue_number} not found in {owner}/{repo}"
2741 +                     )
2742 +
2743 +                 content_id = issue_data["id"]
2744 +                 logger.debug(f"Resolved issue {issue_id} to node ID {content_id}")
2745 +
2746 +             except ValueError:
2747 +                 # Re-raise ValueError as-is (already has good message)
2748 +                 raise
2749 +             except (KeyError, TypeError) as e:
2750 +                 raise ValueError(
2751 +                     f"Invalid issue_id format: {issue_id}. "
2752 +                     "Expected 'owner/repo#number' or issue node ID (I_kwDO...)"
2753 +                 ) from e
2754 +
2755 +         # Validate issue node ID format
2756 +         if not content_id.startswith("I_") and not content_id.startswith("PR_"):
2757 +             raise ValueError(
2758 +                 f"Invalid issue_id: {content_id}. "
2759 +                 "Issue ID must start with 'I_' or 'PR_' (e.g., I_kwDOABCD5678)"
2760 +             )
2761 +
2762 +         try:
2763 +             # Execute ADD_PROJECT_ITEM_MUTATION
2764 +             from .queries import ADD_PROJECT_ITEM_MUTATION
2765 +
2766 +             data = await self.gh_client.execute_graphql(
2767 +                 query=ADD_PROJECT_ITEM_MUTATION,
2768 +                 variables={
2769 +                     "projectId": project_id,
2770 +                     "contentId": content_id,
2771 +                 },
2772 +             )
2773 +
2774 +             # Check for successful addition
2775 +             add_result = data.get("addProjectV2ItemById", {})
2776 +             item_data = add_result.get("item")
2777 +
2778 +             if item_data:
2779 +                 logger.info(
2780 +                     f"Successfully added issue {issue_id} to project {project_id}"
2781 +                 )
2782 +                 return True
2783 +             else:
2784 +                 logger.warning(
2785 +                     f"Failed to add issue {issue_id} to project {project_id}: No item returned"
2786 +                 )
2787 +                 return False
2788 +
2789 +         except Exception as e:
2790 +             error_msg = str(e).lower()
2791 +
2792 +             # Handle "already exists" errors gracefully
2793 +             if "already exists" in error_msg or "duplicate" in error_msg:
2794 +                 logger.info(f"Issue {issue_id} already exists in project {project_id}")
2795 +                 return True
2796 +
2797 +             # Log and re-raise other errors
2798 +             logger.error(f"Failed to add issue {issue_id} to project {project_id}: {e}")
2799 +             raise RuntimeError(f"Failed to add issue to project: {e}") from e
2800 +
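project_add_issue accepts either a raw issue node ID or an "owner/repo#number" reference; the latter is resolved to a node ID with the GetIssueNodeId query before addProjectV2ItemById runs, and "already exists" errors are treated as success. A brief usage sketch (not from the package source), assuming `adapter` is a configured GitHub adapter and the IDs are the placeholder values from the docstring above:

async def add_issue_both_ways(adapter) -> None:
    # Form 1: pass the issue node ID directly (no extra lookup is performed).
    await adapter.project_add_issue(
        project_id="PVT_kwDOABCD1234",
        issue_id="I_kwDOABCD5678",
    )
    # Form 2: pass "owner/repo#number"; the adapter resolves it to a node ID first.
    # A duplicate add logs "already exists" and still returns True.
    added = await adapter.project_add_issue(
        project_id="PVT_kwDOABCD1234",
        issue_id="owner/repo#123",
    )
    print("added or already present:", added)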
2801 +     async def project_remove_issue(
2802 +         self,
2803 +         project_id: str,
2804 +         item_id: str,
2805 +     ) -> bool:
2806 +         """Remove an issue from a GitHub Projects V2 project.
2807 +
2808 +         Args:
2809 +         ----
2810 +             project_id: Project node ID (PVT_kwDOABCD...)
2811 +             item_id: Project item ID (PVTI_kwDOABCD...) NOT issue ID
2812 +
2813 +         Returns:
2814 +         -------
2815 +             True if issue was removed successfully
2816 +
2817 +         Raises:
2818 +         ------
2819 +             ValueError: If project_id or item_id is invalid
2820 +             RuntimeError: If GraphQL mutation fails
2821 +
2822 +         Example:
2823 +         -------
2824 +         success = await adapter.project_remove_issue(
2825 +             project_id="PVT_kwDOABCD1234",
2826 +             item_id="PVTI_kwDOABCD5678"
2827 +         )
2828 +
2829 +         Note:
2830 +         ----
2831 +         Requires the project ITEM ID (PVTI_*), not the issue ID (I_*).
2832 +         Use project_get_issues() to find the item ID for an issue.
2833 +
2834 +         """
2835 +         # Validate project_id format
2836 +         if not project_id or not project_id.startswith("PVT_"):
2837 +             raise ValueError(
2838 +                 f"Invalid project_id: {project_id}. "
2839 +                 "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
2840 +             )
2841 +
2842 +         # Validate item_id format
2843 +         if not item_id or not item_id.startswith("PVTI_"):
2844 +             raise ValueError(
2845 +                 f"Invalid item_id: {item_id}. "
2846 +                 "Item ID must start with 'PVTI_' (e.g., PVTI_kwDOABCD5678). "
2847 +                 "Note: This is the project item ID, not the issue ID. "
2848 +                 "Use project_get_issues() to get the item ID for an issue."
2849 +             )
2850 +
2851 +         try:
2852 +             # Execute REMOVE_PROJECT_ITEM_MUTATION
2853 +             from .queries import REMOVE_PROJECT_ITEM_MUTATION
2854 +
2855 +             data = await self.gh_client.execute_graphql(
2856 +                 query=REMOVE_PROJECT_ITEM_MUTATION,
2857 +                 variables={
2858 +                     "projectId": project_id,
2859 +                     "itemId": item_id,
2860 +                 },
2861 +             )
2862 +
2863 +             # Check for successful removal
2864 +             delete_result = data.get("deleteProjectV2Item", {})
2865 +             deleted_item_id = delete_result.get("deletedItemId")
2866 +
2867 +             if deleted_item_id:
2868 +                 logger.info(
2869 +                     f"Successfully removed item {item_id} from project {project_id}"
2870 +                 )
2871 +                 return True
2872 +             else:
2873 +                 logger.warning(
2874 +                     f"Failed to remove item {item_id} from project {project_id}: "
2875 +                     "No deleted item ID returned"
2876 +                 )
2877 +                 return False
2878 +
2879 +         except Exception as e:
2880 +             error_msg = str(e).lower()
2881 +
2882 +             # Handle "not found" errors gracefully
2883 +             if "not found" in error_msg or "does not exist" in error_msg:
2884 +                 logger.warning(
2885 +                     f"Item {item_id} not found in project {project_id} "
2886 +                     "(may have been already removed)"
2887 +                 )
2888 +                 return False
2889 +
2890 +             # Log and re-raise other errors
2891 +             logger.error(
2892 +                 f"Failed to remove item {item_id} from project {project_id}: {e}"
2893 +             )
2894 +             raise RuntimeError(f"Failed to remove issue from project: {e}") from e
2895 +
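Removal operates on the project item ID (PVTI_*), not the issue node ID (I_*); passing the wrong kind fails validation before any mutation is sent. A short sketch of handling the return value (not from the package source), assuming `adapter` and an item ID obtained from project_get_issues(), shown next:

async def remove_item(adapter, project_id: str, item_id: str) -> None:
    removed = await adapter.project_remove_issue(project_id=project_id, item_id=item_id)
    if removed:
        print(f"Removed {item_id} from {project_id}")
    else:
        # The adapter returns False instead of raising when the item is not
        # found, e.g. because it was already removed.
        print(f"{item_id} was not present in {project_id}")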
2896 +     async def project_get_issues(
2897 +         self,
2898 +         project_id: str,
2899 +         state: str | None = None,
2900 +         limit: int = 10,
2901 +         cursor: str | None = None,
2902 +     ) -> list[Task]:
2903 +         """Get issues in a GitHub Projects V2 project.
2904 +
2905 +         Args:
2906 +         ----
2907 +             project_id: Project node ID (PVT_kwDOABCD...)
2908 +             state: Filter by issue state ("OPEN", "CLOSED", None for all)
2909 +             limit: Maximum number of issues to return (default 10)
2910 +             cursor: Pagination cursor for next page
2911 +
2912 +         Returns:
2913 +         -------
2914 +             List of Task objects representing issues in the project
2915 +
2916 +         Raises:
2917 +         ------
2918 +             ValueError: If project_id is invalid
2919 +             RuntimeError: If GraphQL query fails
2920 +
2921 +         Example:
2922 +         -------
2923 +         # Get all open issues
2924 +         issues = await adapter.project_get_issues(
2925 +             project_id="PVT_kwDOABCD1234",
2926 +             state="OPEN",
2927 +             limit=20
2928 +         )
2929 +
2930 +         # Get next page
2931 +         issues = await adapter.project_get_issues(
2932 +             project_id="PVT_kwDOABCD1234",
2933 +             cursor=last_cursor
2934 +         )
2935 +
2936 +         Note:
2937 +         ----
2938 +         Returns Task objects with additional project context:
2939 +         - task.metadata["project_item_id"]: ID for removal operations
2940 +         - task.metadata["project_number"]: Project number
2941 +
2942 +         """
2943 +         # Validate project_id format
2944 +         if not project_id or not project_id.startswith("PVT_"):
2945 +             raise ValueError(
2946 +                 f"Invalid project_id: {project_id}. "
2947 +                 "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
2948 +             )
2949 +
2950 +         try:
2951 +             # Execute PROJECT_ITEMS_QUERY
2952 +             from .queries import PROJECT_ITEMS_QUERY
2953 +
2954 +             data = await self.gh_client.execute_graphql(
2955 +                 query=PROJECT_ITEMS_QUERY,
2956 +                 variables={
2957 +                     "projectId": project_id,
2958 +                     "first": limit,
2959 +                     "after": cursor,
2960 +                 },
2961 +             )
2962 +
2963 +             # Parse response and extract items array
2964 +             project_node = data.get("node")
2965 +             if not project_node:
2966 +                 logger.warning(f"Project {project_id} not found")
2967 +                 return []
2968 +
2969 +             items_data = project_node.get("items", {})
2970 +             item_nodes = items_data.get("nodes", [])
2971 +
2972 +             # Filter items by content type (only "Issue", skip "PullRequest", "DraftIssue")
2973 +             tasks = []
2974 +             for item in item_nodes:
2975 +                 content = item.get("content")
2976 +                 if not content:
2977 +                     # Skip archived items without content
2978 +                     logger.debug(f"Skipping item {item.get('id')} without content")
2979 +                     continue
2980 +
2981 +                 content_type = content.get("__typename")
2982 +
2983 +                 # Only process Issues
2984 +                 if content_type != "Issue":
2985 +                     logger.debug(f"Skipping {content_type} item {item.get('id')}")
2986 +                     continue
2987 +
2988 +                 # Map GitHub issue to Task using existing mapper
2989 +                 from .mappers import map_github_issue_to_task
2990 +
2991 +                 # Convert GraphQL format to format expected by mapper
2992 +                 issue_dict = {
2993 +                     "number": content.get("number"),
2994 +                     "title": content.get("title"),
2995 +                     "state": content.get("state", "").lower(),
2996 +                     "labels": content.get("labels", {}),
2997 +                     # Note: PROJECT_ITEMS_QUERY doesn't include all issue fields
2998 +                     # Only basic fields are available
2999 +                 }
3000 +
3001 +                 task = map_github_issue_to_task(issue_dict, self.custom_priority_scheme)
3002 +
3003 +                 # Add project context to metadata
3004 +                 if "github" not in task.metadata:
3005 +                     task.metadata["github"] = {}
3006 +
3007 +                 task.metadata["github"]["project_item_id"] = item["id"]
3008 +                 task.metadata["github"]["project_id"] = project_id
3009 +
3010 +                 # Extract project number from project_id if needed
3011 +                 # Project node ID format: PVT_kwDO... but we don't have number here
3012 +                 # We'll need to query the project separately or store it
3013 +
3014 +                 tasks.append(task)
3015 +
3016 +             # Filter by state if provided (post-query filtering)
3017 +             if state:
3018 +                 state_lower = state.lower()
3019 +                 tasks = [
3020 +                     task
3021 +                     for task in tasks
3022 +                     if (isinstance(task.state, str) and task.state == state_lower)
3023 +                     or (
3024 +                         hasattr(task.state, "value") and task.state.value == state_lower
3025 +                     )
3026 +                     or (
3027 +                         state_lower == "open"
3028 +                         and (
3029 +                             (
3030 +                                 isinstance(task.state, str)
3031 +                                 and task.state
3032 +                                 in ["open", "in_progress", "blocked", "waiting"]
3033 +                             )
3034 +                             or (
3035 +                                 hasattr(task.state, "value")
3036 +                                 and task.state.value
3037 +                                 in ["open", "in_progress", "blocked", "waiting"]
3038 +                             )
3039 +                         )
3040 +                     )
3041 +                     or (
3042 +                         state_lower == "closed"
3043 +                         and (
3044 +                             (
3045 +                                 isinstance(task.state, str)
3046 +                                 and task.state in ["done", "closed"]
3047 +                             )
3048 +                             or (
3049 +                                 hasattr(task.state, "value")
3050 +                                 and task.state.value in ["done", "closed"]
3051 +                             )
3052 +                         )
3053 +                     )
3054 +                 ]
3055 +
3056 +             logger.info(
3057 +                 f"Retrieved {len(tasks)} issues from project {project_id} "
3058 +                 f"(filtered by state={state})"
3059 +             )
3060 +
3061 +             return tasks
3062 +
3063 +         except Exception as e:
3064 +             logger.error(f"Failed to get issues from project {project_id}: {e}")
3065 +             raise RuntimeError(f"Failed to get project issues: {e}") from e
3066 +
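project_get_issues maps each project item to a Task and stores the project item ID under task.metadata["github"]["project_item_id"] (see the mapping code above), which is exactly the identifier project_remove_issue needs. A hedged round-trip sketch (not from the package source), assuming `adapter` is a configured GitHub adapter:

async def remove_first_open_issue(adapter, project_id: str) -> bool:
    # List open issues, then remove the first one via its project item ID.
    tasks = await adapter.project_get_issues(project_id=project_id, state="OPEN", limit=20)
    if not tasks:
        return False
    item_id = tasks[0].metadata["github"]["project_item_id"]
    return await adapter.project_remove_issue(project_id=project_id, item_id=item_id)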
3067 +     async def project_get_statistics(
3068 +         self,
3069 +         project_id: str,
3070 +     ) -> ProjectStatistics:
3071 +         """Get comprehensive statistics for a GitHub Projects V2 project.
3072 +
3073 +         Calculates issue state breakdown, priority distribution, and health status
3074 +         by analyzing all issues in the project. Priority is determined from issue
3075 +         labels (priority:low, priority/medium, etc.), and blocked status is detected
3076 +         from "blocked" or "blocker" labels.
3077 +
3078 +         Health Scoring Logic:
3079 +         - on_track: >70% complete AND <10% blocked
3080 +         - at_risk: >40% complete AND <30% blocked
3081 +         - off_track: Otherwise (low completion or high blocked rate)
3082 +
3083 +         Args:
3084 +         ----
3085 +             project_id: Project node ID (PVT_kwDOABCD...)
3086 +
3087 +         Returns:
3088 +         -------
3089 +             ProjectStatistics with metrics and health scoring
3090 +
3091 +         Raises:
3092 +         ------
3093 +             ValueError: If project_id is invalid format
3094 +             RuntimeError: If statistics calculation fails
3095 +
3096 +         Example:
3097 +         -------
3098 +         stats = await adapter.project_get_statistics("PVT_kwDOABCD1234")
3099 +         print(f"Health: {stats.health}, Progress: {stats.progress_percentage}%")
3100 +         print(f"Priority breakdown: H={stats.priority_high_count}, "
3101 +               f"M={stats.priority_medium_count}")
3102 +
3103 +         Note:
3104 +         ----
3105 +         Fetches up to 1000 issues for reasonable performance. For projects
3106 +         with >1000 issues, statistics may be based on a sample.
3107 +
3108 +         """
3109 +         from ...core.models import ProjectStatistics
3110 +
3111 +         # Validate project_id format
3112 +         if not project_id or not project_id.startswith("PVT_"):
3113 +             raise ValueError(
3114 +                 f"Invalid project_id: {project_id}. "
3115 +                 "Project ID must start with 'PVT_' (e.g., PVT_kwDOABCD1234)"
3116 +             )
3117 +
3118 +         logger.debug(f"Calculating statistics for project {project_id}")
3119 +
3120 +         try:
3121 +             # Fetch all issues (limit 1000 for reasonable performance)
3122 +             issues = await self.project_get_issues(project_id=project_id, limit=1000)
3123 +         except Exception as e:
3124 +             logger.error(f"Failed to fetch issues for statistics: {e}")
3125 +             raise RuntimeError(f"Failed to calculate project statistics: {e}") from e
3126 +
3127 +         # Calculate basic counts
3128 +         total = len(issues)
3129 +         open_count = 0
3130 +         closed_count = 0
3131 +         in_progress_count = 0
3132 +
3133 +         # Count by priority (from labels)
3134 +         priority_counts = {"low": 0, "medium": 0, "high": 0, "critical": 0}
3135 +         blocked_count = 0
3136 +
3137 +         for issue in issues:
3138 +             # Count by state (GitHub only has OPEN/CLOSED)
3139 +             # We map based on state enum value
3140 +             state_value = (
3141 +                 issue.state.value if hasattr(issue.state, "value") else str(issue.state)
3142 +             )
3143 +
3144 +             if state_value in ["open", "in_progress", "blocked", "waiting"]:
3145 +                 if state_value == "in_progress":
3146 +                     in_progress_count += 1
3147 +                 else:
3148 +                     open_count += 1
3149 +             elif state_value in ["done", "closed"]:
3150 +                 closed_count += 1
3151 +             else:
3152 +                 # Default unrecognized states to open
3153 +                 open_count += 1
3154 +
3155 +             # Check tags (labels) for priority and blocked status
3156 +             for tag in issue.tags:
3157 +                 tag_lower = tag.lower()
3158 +
3159 +                 # Priority detection (priority:high, priority/low, etc.)
3160 +                 if "priority:" in tag_lower or "priority/" in tag_lower:
3161 +                     # Extract priority level
3162 +                     priority = (
3163 +                         tag_lower.replace("priority:", "")
3164 +                         .replace("priority/", "")
3165 +                         .strip()
3166 +                     )
3167 +                     if priority in priority_counts:
3168 +                         priority_counts[priority] += 1
3169 +                     elif "crit" in priority or "p0" in priority:
3170 +                         priority_counts["critical"] += 1
3171 +                     elif "high" in priority or "p1" in priority:
3172 +                         priority_counts["high"] += 1
3173 +                     elif "med" in priority or "p2" in priority:
3174 +                         priority_counts["medium"] += 1
3175 +                     elif "low" in priority or "p3" in priority:
3176 +                         priority_counts["low"] += 1
3177 +
3178 +                 # Blocked detection
3179 +                 if "blocked" in tag_lower or "blocker" in tag_lower:
3180 +                     blocked_count += 1
3181 +
3182 +         # Calculate health and progress
3183 +         if total == 0:
3184 +             health = "on_track"
3185 +             progress_pct = 0.0
3186 +         else:
3187 +             completed_pct = (closed_count / total) * 100
3188 +             blocked_pct = (blocked_count / total) * 100
3189 +
3190 +             # Health scoring logic
3191 +             if completed_pct > 70 and blocked_pct < 10:
3192 +                 health = "on_track"
3193 +             elif completed_pct > 40 and blocked_pct < 30:
3194 +                 health = "at_risk"
3195 +             else:
3196 +                 health = "off_track"
3197 +
3198 +             progress_pct = completed_pct
3199 +
3200 +         # Create statistics object
3201 +         stats = ProjectStatistics(
3202 +             total_count=total,
3203 +             open_count=open_count,
3204 +             in_progress_count=in_progress_count,
3205 +             completed_count=closed_count,
3206 +             blocked_count=blocked_count,
3207 +             priority_low_count=priority_counts["low"],
3208 +             priority_medium_count=priority_counts["medium"],
3209 +             priority_high_count=priority_counts["high"],
3210 +             priority_critical_count=priority_counts["critical"],
3211 +             health=health,
3212 +             progress_percentage=round(progress_pct, 1),
3213 +         )
3214 +
3215 +         logger.info(
3216 +             f"Statistics for {project_id}: {total} issues, "
3217 +             f"{health} health, {progress_pct:.1f}% complete, "
3218 +             f"{blocked_count} blocked"
3219 +         )
3220 +
3221 +         return stats
3222 +
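To make the health thresholds concrete: with 20 issues, 15 closed and 1 blocked, completion is 75% and the blocked rate is 5%, so the project is on_track; with 10 closed and 4 blocked it drops to 50% and 20% and becomes at_risk. The snippet below (not from the package source) simply restates the branching shown above in plain Python:

total, closed, blocked = 20, 15, 1
completed_pct = closed / total * 100   # 75.0
blocked_pct = blocked / total * 100    # 5.0
if completed_pct > 70 and blocked_pct < 10:
    health = "on_track"                # taken here: 75% complete, 5% blocked
elif completed_pct > 40 and blocked_pct < 30:
    health = "at_risk"
else:
    health = "off_track"
print(health)  # on_track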
2082 3223     async def close(self) -> None:
2083 3224         """Close the HTTP client connection."""
2084 3225         await self.client.aclose()