git-aware-coding-agent 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- avos_cli/__init__.py +3 -0
- avos_cli/agents/avos_ask_agent.md +47 -0
- avos_cli/agents/avos_ask_agent_JSON_converter.md +78 -0
- avos_cli/agents/avos_hisotry_agent_JSON_converter.md +92 -0
- avos_cli/agents/avos_history_agent.md +58 -0
- avos_cli/agents/git_diff_agent.md +63 -0
- avos_cli/artifacts/__init__.py +17 -0
- avos_cli/artifacts/base.py +47 -0
- avos_cli/artifacts/commit_builder.py +35 -0
- avos_cli/artifacts/doc_builder.py +30 -0
- avos_cli/artifacts/issue_builder.py +37 -0
- avos_cli/artifacts/pr_builder.py +50 -0
- avos_cli/cli/__init__.py +1 -0
- avos_cli/cli/main.py +504 -0
- avos_cli/commands/__init__.py +1 -0
- avos_cli/commands/ask.py +541 -0
- avos_cli/commands/connect.py +363 -0
- avos_cli/commands/history.py +549 -0
- avos_cli/commands/hook_install.py +260 -0
- avos_cli/commands/hook_sync.py +231 -0
- avos_cli/commands/ingest.py +506 -0
- avos_cli/commands/ingest_pr.py +239 -0
- avos_cli/config/__init__.py +1 -0
- avos_cli/config/hash_store.py +93 -0
- avos_cli/config/lock.py +122 -0
- avos_cli/config/manager.py +180 -0
- avos_cli/config/state.py +90 -0
- avos_cli/exceptions.py +272 -0
- avos_cli/models/__init__.py +58 -0
- avos_cli/models/api.py +75 -0
- avos_cli/models/artifacts.py +99 -0
- avos_cli/models/config.py +56 -0
- avos_cli/models/diff.py +117 -0
- avos_cli/models/query.py +234 -0
- avos_cli/parsers/__init__.py +21 -0
- avos_cli/parsers/artifact_ref_extractor.py +173 -0
- avos_cli/parsers/reference_parser.py +117 -0
- avos_cli/services/__init__.py +1 -0
- avos_cli/services/chronology_service.py +68 -0
- avos_cli/services/citation_validator.py +134 -0
- avos_cli/services/context_budget_service.py +104 -0
- avos_cli/services/diff_resolver.py +398 -0
- avos_cli/services/diff_summary_service.py +141 -0
- avos_cli/services/git_client.py +351 -0
- avos_cli/services/github_client.py +443 -0
- avos_cli/services/llm_client.py +312 -0
- avos_cli/services/memory_client.py +323 -0
- avos_cli/services/query_fallback_formatter.py +108 -0
- avos_cli/services/reply_output_service.py +341 -0
- avos_cli/services/sanitization_service.py +218 -0
- avos_cli/utils/__init__.py +1 -0
- avos_cli/utils/dotenv_load.py +50 -0
- avos_cli/utils/hashing.py +22 -0
- avos_cli/utils/logger.py +77 -0
- avos_cli/utils/output.py +232 -0
- avos_cli/utils/sanitization_diagnostics.py +81 -0
- avos_cli/utils/time_helpers.py +56 -0
- git_aware_coding_agent-1.0.0.dist-info/METADATA +390 -0
- git_aware_coding_agent-1.0.0.dist-info/RECORD +62 -0
- git_aware_coding_agent-1.0.0.dist-info/WHEEL +4 -0
- git_aware_coding_agent-1.0.0.dist-info/entry_points.txt +2 -0
- git_aware_coding_agent-1.0.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,443 @@
|
|
|
1
|
+
"""GitHub REST API client for fetching PRs, issues, comments, and reviews.
|
|
2
|
+
|
|
3
|
+
Provides paginated listing with date filtering, rate limit handling,
|
|
4
|
+
and typed error mapping. Uses httpx sync client with tenacity retry
|
|
5
|
+
for transient 5xx errors.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import os
|
|
11
|
+
import re
|
|
12
|
+
import time
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
import httpx
|
|
17
|
+
from tenacity import (
|
|
18
|
+
retry,
|
|
19
|
+
retry_if_exception_type,
|
|
20
|
+
stop_after_attempt,
|
|
21
|
+
wait_exponential,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
from avos_cli.exceptions import (
|
|
25
|
+
AuthError,
|
|
26
|
+
ConfigurationNotInitializedError,
|
|
27
|
+
RateLimitError,
|
|
28
|
+
ResourceNotFoundError,
|
|
29
|
+
UpstreamUnavailableError,
|
|
30
|
+
)
|
|
31
|
+
from avos_cli.utils.dotenv_load import load_layers
|
|
32
|
+
from avos_cli.utils.logger import get_logger
|
|
33
|
+
|
|
34
|
+
_log = get_logger("github_client")
|
|
35
|
+
|
|
36
|
+
_API_BASE = "https://api.github.com"
|
|
37
|
+
_dotenv_loaded_for_github = False
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _ensure_dotenv_for_github() -> None:
    """Load layered ``.env`` once so ``GITHUB_TOKEN`` from repo root is visible.

    Idempotent: the module-level flag guards against repeated loads when
    multiple clients are constructed in one process.
    """
    global _dotenv_loaded_for_github
    if _dotenv_loaded_for_github:
        return
    load_layers()
    _dotenv_loaded_for_github = True
|
|
46
|
+
# Per-request timeout in seconds applied to the shared httpx.Client.
_TIMEOUT = 30.0
# Maximum tenacity attempts for transient (network / HTTP 5xx) failures.
_MAX_RETRIES = 3
# Hard upper bound on Link-header pagination so a single listing call
# can never crawl an unbounded number of pages.
_MAX_PAGES = 100
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class _RetryableGitHubError(Exception):
    """Internal marker for transient GitHub API errors.

    Raised for connect/timeout failures and HTTP 5xx responses so the
    tenacity ``retry_if_exception_type`` predicate can target exactly
    these cases; never propagated to callers (``reraise=True`` surfaces
    it only after all attempts are exhausted).
    """
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class GitHubClient:
    """HTTP client for the GitHub REST API.

    Provides PR/issue listing, detail fetching, and repo validation
    with pagination, rate limit handling, and typed error mapping.

    Args:
        token: GitHub personal access token. If omitted, uses ``GITHUB_TOKEN``
            from the environment after loading layered ``.env`` files (including
            the repository root ``.env``). Pass ``""`` explicitly to require a
            non-empty token and ignore the environment.

    Raises:
        AuthError: If no non-empty token can be resolved.
    """

    def __init__(self, token: str | None = None) -> None:
        _ensure_dotenv_for_github()
        # ``None`` means "fall back to the environment"; an explicit string
        # (even empty) means "use exactly this value".
        resolved = (
            os.environ.get("GITHUB_TOKEN", "").strip()
            if token is None
            else token.strip()
        )
        if not resolved:
            raise AuthError("GitHub token is required", service="GitHub")
        self._token = resolved
        self._client = httpx.Client(
            headers={
                "Authorization": f"Bearer {resolved}",
                "Accept": "application/vnd.github.v3+json",
            },
            timeout=_TIMEOUT,
        )

    def list_pull_requests(
        self,
        owner: str,
        repo: str,
        since_date: str | None = None,
    ) -> list[dict[str, Any]]:
        """List pull requests for a repository.

        Args:
            owner: Repository owner.
            repo: Repository name.
            since_date: Optional ISO date (or datetime) for lower-bound
                filtering on ``updated_at``.

        Returns:
            List of PR data dicts.
        """
        params: dict[str, str | int] = {
            "state": "all",
            "sort": "updated",
            "direction": "desc",
            "per_page": 100,
        }
        url = f"{_API_BASE}/repos/{owner}/{repo}/pulls"
        items = self._paginate(url, params)
        if since_date:
            # BUG FIX: unlike the issues endpoint, ``GET /repos/{o}/{r}/pulls``
            # does not accept a ``since`` query parameter — GitHub silently
            # ignores it, so the previous implementation never filtered.
            # Apply the lower bound client-side; ISO-8601 timestamps compare
            # correctly as plain strings ("2024-01-02T..." >= "2024-01-01").
            items = [
                pr for pr in items if (pr.get("updated_at") or "") >= since_date
            ]
        return items

    def get_pr_details(
        self, owner: str, repo: str, pr_number: int
    ) -> dict[str, Any]:
        """Fetch detailed PR data including comments, reviews, and files.

        Args:
            owner: Repository owner.
            repo: Repository name.
            pr_number: Pull request number.

        Returns:
            Dict with PR data, comments, reviews, and files.
        """
        base = f"{_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}"
        pr_data = self._get(base)
        # Review comments, reviews, and changed files each live behind their
        # own paginated sub-resource; attach them onto the PR payload.
        pr_data["comments"] = self._paginate(f"{base}/comments", {})
        pr_data["reviews"] = self._paginate(f"{base}/reviews", {})
        pr_data["files"] = self._paginate(f"{base}/files", {})
        return pr_data

    def list_issues(
        self,
        owner: str,
        repo: str,
        since_date: str | None = None,
    ) -> list[dict[str, Any]]:
        """List issues (excluding PRs) for a repository.

        Args:
            owner: Repository owner.
            repo: Repository name.
            since_date: Optional ISO date for lower-bound filtering (the
                issues endpoint supports ``since`` server-side).

        Returns:
            List of issue data dicts (PRs filtered out).
        """
        params: dict[str, str | int] = {
            "state": "all",
            "sort": "updated",
            "direction": "desc",
            "per_page": 100,
        }
        if since_date:
            params["since"] = since_date

        url = f"{_API_BASE}/repos/{owner}/{repo}/issues"
        all_items = self._paginate(url, params)
        # The issues endpoint also returns PRs; a "pull_request" key marks them.
        return [item for item in all_items if not item.get("pull_request")]

    def get_issue_details(
        self, owner: str, repo: str, issue_number: int
    ) -> dict[str, Any]:
        """Fetch detailed issue data including comments.

        Args:
            owner: Repository owner.
            repo: Repository name.
            issue_number: Issue number.

        Returns:
            Dict with issue data and comments.
        """
        base = f"{_API_BASE}/repos/{owner}/{repo}/issues/{issue_number}"
        issue_data = self._get(base)
        issue_data["comments"] = self._paginate(f"{base}/comments", {})
        return issue_data

    def get_repo_metadata(self, owner: str, repo: str) -> dict[str, Any]:
        """Fetch repository metadata.

        Args:
            owner: Repository owner.
            repo: Repository name.

        Returns:
            Dict with repository metadata.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}"
        return self._get(url)

    def validate_repo(self, owner: str, repo: str) -> bool:
        """Check if a repository exists and is accessible.

        Args:
            owner: Repository owner.
            repo: Repository name.

        Returns:
            True if accessible, False if 404. Auth and upstream errors
            still propagate as their typed exceptions.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}"
        try:
            self._get(url)
            return True
        except ResourceNotFoundError:
            return False

    def get_pr_diff(self, owner: str, repo: str, pr_number: int) -> str:
        """Fetch the unified diff for a pull request.

        Uses the GitHub diff media type to get raw unified diff output.

        Args:
            owner: Repository owner.
            repo: Repository name.
            pr_number: Pull request number.

        Returns:
            Raw unified diff text.

        Raises:
            ResourceNotFoundError: If the PR does not exist.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}"
        response = self._request_with_retry_diff(url)
        self._check_response(response)
        return response.text

    def list_pr_commits(self, owner: str, repo: str, pr_number: int) -> list[str]:
        """List all commit SHAs in a pull request.

        Fetches the full list of commits (with pagination) and extracts
        the SHA for each commit.

        Args:
            owner: Repository owner.
            repo: Repository name.
            pr_number: Pull request number.

        Returns:
            List of full 40-character commit SHAs.

        Raises:
            ResourceNotFoundError: If the PR does not exist.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}/commits"
        commits = self._paginate(url, {})
        return [commit["sha"] for commit in commits]

    def get_commit(self, owner: str, repo: str, commit_ref: str) -> dict[str, Any]:
        """Fetch a single commit as JSON (includes full SHA).

        Args:
            owner: Repository owner.
            repo: Repository name.
            commit_ref: Commit SHA (short or full), branch, or tag.

        Returns:
            GitHub commit object dict.

        Raises:
            ResourceNotFoundError: If the commit does not exist.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}/commits/{commit_ref}"
        return self._get(url)

    def get_commit_diff(self, owner: str, repo: str, commit_ref: str) -> str:
        """Fetch the unified diff for a commit (parent..commit).

        Uses the GitHub diff media type on the commits endpoint. No local
        git is required.

        Args:
            owner: Repository owner.
            repo: Repository name.
            commit_ref: Commit SHA (short or full), branch, or tag.

        Returns:
            Raw unified diff text.

        Raises:
            ResourceNotFoundError: If the commit does not exist.
        """
        url = f"{_API_BASE}/repos/{owner}/{repo}/commits/{commit_ref}"
        response = self._request_with_retry_diff(url)
        self._check_response(response)
        return response.text

    @retry(
        retry=retry_if_exception_type(_RetryableGitHubError),
        stop=stop_after_attempt(_MAX_RETRIES),
        wait=wait_exponential(multiplier=0.5, min=0.1, max=10),
        reraise=True,
    )
    def _request_with_retry_diff(self, url: str) -> httpx.Response:
        """Execute GET with diff Accept header and retry on 5xx errors."""
        try:
            # Per-request Accept override selects the raw diff media type.
            response = self._client.get(
                url,
                headers={"Accept": "application/vnd.github.v3.diff"},
            )
        except (httpx.ConnectError, httpx.TimeoutException) as e:
            raise _RetryableGitHubError(str(e)) from e

        if response.status_code >= 500:
            raise _RetryableGitHubError(f"HTTP {response.status_code}")

        return response

    def _get(self, url: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
        """Execute a GET request with error handling and return parsed JSON."""
        response = self._request_with_retry(url, params)
        self._check_response(response)
        result: dict[str, Any] = response.json()
        return result

    def _paginate(
        self, url: str, params: dict[str, str | int]
    ) -> list[dict[str, Any]]:
        """Follow pagination via Link headers up to MAX_PAGES.

        Args:
            url: Initial request URL.
            params: Query parameters for the first page.

        Returns:
            Aggregated list of all items across pages.
        """
        all_items: list[dict[str, Any]] = []
        current_url: str | None = url
        current_params: dict[str, str | int] | None = params
        page_count = 0

        while current_url and page_count < _MAX_PAGES:
            response = self._request_with_retry(current_url, current_params)
            self._check_response(response)
            self._check_rate_limit(response)

            data = response.json()
            if isinstance(data, list):
                all_items.extend(data)
            else:
                all_items.append(data)

            # The "next" Link URL already encodes the query string, so
            # params are only sent on the first request.
            current_url = self._next_page_url(response)
            current_params = None
            page_count += 1

        return all_items

    @retry(
        retry=retry_if_exception_type(_RetryableGitHubError),
        stop=stop_after_attempt(_MAX_RETRIES),
        wait=wait_exponential(multiplier=0.5, min=0.1, max=10),
        reraise=True,
    )
    def _request_with_retry(
        self, url: str, params: dict[str, Any] | None = None
    ) -> httpx.Response:
        """Execute GET with retry on 5xx errors."""
        try:
            response = self._client.get(url, params=params)
        except (httpx.ConnectError, httpx.TimeoutException) as e:
            raise _RetryableGitHubError(str(e)) from e

        if response.status_code >= 500:
            raise _RetryableGitHubError(f"HTTP {response.status_code}")

        return response

    def _check_response(self, response: httpx.Response) -> None:
        """Map HTTP error codes to typed exceptions.

        Raises:
            AuthError: On 401/403.
            ResourceNotFoundError: On 404.
            UpstreamUnavailableError: On any other 4xx/5xx.
        """
        if response.status_code in (401, 403):
            # ROBUSTNESS FIX: error bodies from proxies/gateways may not be
            # JSON; fall back to a generic message instead of raising
            # a JSON decode error and masking the auth failure.
            try:
                msg = response.json().get("message", "Authentication failed")
            except ValueError:
                msg = "Authentication failed"
            raise AuthError(msg, service="GitHub")
        if response.status_code == 404:
            raise ResourceNotFoundError(
                f"GitHub resource not found: {response.url}"
            )
        if response.status_code >= 400:
            raise UpstreamUnavailableError(
                f"GitHub API error: HTTP {response.status_code}"
            )

    def _check_rate_limit(self, response: httpx.Response) -> None:
        """Raise ``RateLimitError`` (with retry hint) when the quota is exhausted."""
        remaining = response.headers.get("X-RateLimit-Remaining")
        reset_ts = response.headers.get("X-RateLimit-Reset")

        if remaining is not None and int(remaining) == 0 and reset_ts:
            # X-RateLimit-Reset is a Unix epoch second; compute how long
            # the caller should back off.
            wait_seconds = max(0, int(reset_ts) - int(time.time()))
            if wait_seconds > 0:
                _log.warning("GitHub rate limit exhausted, waiting %ds", wait_seconds)
                raise RateLimitError(
                    "GitHub API rate limit exhausted",
                    retry_after=float(wait_seconds),
                )

    def _next_page_url(self, response: httpx.Response) -> str | None:
        """Parse the 'next' URL from the Link header, or None on the last page."""
        link_header = response.headers.get("link", "")
        if not link_header:
            return None
        match = re.search(r'<([^>]+)>;\s*rel="next"', link_header)
        return match.group(1) if match else None
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
def github_client_for_repo(repo_root: Path) -> GitHubClient:
    """Build ``GitHubClient`` from connected config and/or environment (memory only).

    Uses the same secret sources users already configured: optional
    ``github_token`` on :class:`~avos_cli.models.config.RepoConfig` (file plus
    env overlay from :func:`~avos_cli.config.manager.load_config`, never
    re-persisted here), then ``GITHUB_TOKEN`` after layered ``.env`` load.

    Args:
        repo_root: Git root containing ``.avos/config.json`` when connected.

    Returns:
        Authenticated client.

    Raises:
        AuthError: If no non-empty token is available.
    """
    # Local import avoids a module-level cycle with the config package.
    from avos_cli.config.manager import load_config

    try:
        cfg = load_config(repo_root)
    except ConfigurationNotInitializedError:
        # Not connected: defer entirely to the environment.
        cfg = None

    config_token = ""
    if cfg is not None and cfg.github_token is not None:
        config_token = cfg.github_token.get_secret_value().strip()

    if config_token:
        return GitHubClient(token=config_token)
    # Empty/absent config token: let GitHubClient resolve GITHUB_TOKEN itself.
    return GitHubClient()
|