gtg-0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- goodtogo/__init__.py +66 -0
- goodtogo/adapters/__init__.py +22 -0
- goodtogo/adapters/agent_state.py +490 -0
- goodtogo/adapters/cache_memory.py +208 -0
- goodtogo/adapters/cache_sqlite.py +305 -0
- goodtogo/adapters/github.py +523 -0
- goodtogo/adapters/time_provider.py +123 -0
- goodtogo/cli.py +311 -0
- goodtogo/container.py +313 -0
- goodtogo/core/__init__.py +0 -0
- goodtogo/core/analyzer.py +982 -0
- goodtogo/core/errors.py +100 -0
- goodtogo/core/interfaces.py +388 -0
- goodtogo/core/models.py +312 -0
- goodtogo/core/validation.py +144 -0
- goodtogo/parsers/__init__.py +0 -0
- goodtogo/parsers/claude.py +188 -0
- goodtogo/parsers/coderabbit.py +352 -0
- goodtogo/parsers/cursor.py +135 -0
- goodtogo/parsers/generic.py +192 -0
- goodtogo/parsers/greptile.py +249 -0
- gtg-0.4.0.dist-info/METADATA +278 -0
- gtg-0.4.0.dist-info/RECORD +27 -0
- gtg-0.4.0.dist-info/WHEEL +5 -0
- gtg-0.4.0.dist-info/entry_points.txt +2 -0
- gtg-0.4.0.dist-info/licenses/LICENSE +21 -0
- gtg-0.4.0.dist-info/top_level.txt +1 -0
goodtogo/cli.py
ADDED
@@ -0,0 +1,311 @@
"""Thin CLI wrapper around PRAnalyzer.

This module provides a command-line interface for the GoodToMerge library,
enabling AI agents and humans to check PR readiness from the terminal or
CI/CD pipelines.

Exit codes (default - AI-friendly mode):
    0 - Any analyzable state (READY, ACTION_REQUIRED, UNRESOLVED_THREADS, CI_FAILING)
    4 - Error fetching data (PRStatus.ERROR)

Exit codes (with -q or --semantic-codes):
    0 - Ready to merge (PRStatus.READY)
    1 - Actionable comments need addressing (PRStatus.ACTION_REQUIRED)
    2 - Unresolved threads exist (PRStatus.UNRESOLVED_THREADS)
    3 - CI/CD checks failing (PRStatus.CI_FAILING)
    4 - Error fetching data (PRStatus.ERROR)

Example:
    $ gtg 123                          # auto-detect repo from git origin
    $ gtg 123 --repo myorg/myrepo      # explicit repo
    $ gtg 123 --format text --verbose  # human-readable output
    $ gtg 123 -q                       # quiet mode with semantic exit codes
    $ gtg 123 --semantic-codes         # semantic exit codes with output
"""

from __future__ import annotations

import os
import re
import subprocess
import sys
from typing import Optional

import click

from goodtogo import __version__
from goodtogo.container import Container
from goodtogo.core.analyzer import PRAnalyzer
from goodtogo.core.errors import redact_error
from goodtogo.core.models import PRAnalysisResult, PRStatus


def parse_github_remote_url(url: str) -> Optional[tuple[str, str]]:
    """Parse a GitHub remote URL to extract owner and repo.

    Supports both HTTPS and SSH formats:
    - https://github.com/owner/repo.git
    - https://github.com/owner/repo
    - git@github.com:owner/repo.git
    - git@github.com:owner/repo

    Args:
        url: The git remote URL to parse.

    Returns:
        Tuple of (owner, repo) if the URL is a valid GitHub URL,
        None otherwise.
    """
    if not url:
        return None

    # HTTPS format: https://github.com/owner/repo.git or https://github.com/owner/repo
    https_pattern = r"^https://github\.com/([^/]+)/([^/]+?)(?:\.git)?/?$"
    match = re.match(https_pattern, url)
    if match:
        return (match.group(1), match.group(2))

    # SSH format: git@github.com:owner/repo.git or git@github.com:owner/repo
    ssh_pattern = r"^git@github\.com:([^/]+)/([^/]+?)(?:\.git)?/?$"
    match = re.match(ssh_pattern, url)
    if match:
        return (match.group(1), match.group(2))

    return None


def get_repo_from_git_origin() -> Optional[tuple[str, str]]:
    """Get repository owner and name from git remote origin.

    Runs `git remote get-url origin` to get the origin URL,
    then parses it to extract owner and repo name.

    Returns:
        Tuple of (owner, repo) if origin is a valid GitHub URL,
        None if not in a git repo, no origin remote, origin isn't GitHub,
        git is not installed, or the command times out.
    """
    try:
        result = subprocess.run(
            ["git", "remote", "get-url", "origin"],
            capture_output=True,
            text=True,
            check=True,
            timeout=5,
        )
        origin_url = result.stdout.strip()
        return parse_github_remote_url(origin_url)
    except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired):
        # No origin remote, not a git repo, git not installed, or timeout
        return None


# Semantic exit codes for shell scripting (-q or --semantic-codes)
SEMANTIC_EXIT_CODES: dict[PRStatus, int] = {
    PRStatus.READY: 0,
    PRStatus.ACTION_REQUIRED: 1,
    PRStatus.UNRESOLVED_THREADS: 2,
    PRStatus.CI_FAILING: 3,
    PRStatus.ERROR: 4,
}

# AI-friendly exit codes (default) - only ERROR is non-zero
AI_FRIENDLY_EXIT_CODES: dict[PRStatus, int] = {
    PRStatus.READY: 0,
    PRStatus.ACTION_REQUIRED: 0,
    PRStatus.UNRESOLVED_THREADS: 0,
    PRStatus.CI_FAILING: 0,
    PRStatus.ERROR: 4,
}


@click.command()
@click.argument("pr_number", type=int)
@click.option(
    "--repo",
    "-r",
    required=False,
    default=None,
    help="Repository in owner/repo format (auto-detected from git origin if not provided)",
)
@click.option(
    "--cache",
    type=click.Choice(["sqlite", "redis", "none"]),
    default="sqlite",
    help="Cache backend (default: sqlite)",
)
@click.option(
    "--cache-path",
    default=".goodtogo/cache.db",
    help="SQLite cache path",
)
@click.option(
    "--redis-url",
    envvar="REDIS_URL",
    help="Redis URL (required if --cache=redis)",
)
@click.option(
    "--format",
    "output_format",
    type=click.Choice(["json", "text"]),
    default="json",
    help="Output format (default: json)",
)
@click.option(
    "--verbose",
    "-v",
    is_flag=True,
    help="Verbose output",
)
@click.option(
    "--exclude-checks",
    "-x",
    multiple=True,
    help="CI check names to exclude (can be repeated)",
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help="Quiet mode: no output, use semantic exit codes (like grep -q)",
)
@click.option(
    "--semantic-codes",
    is_flag=True,
    help="Use semantic exit codes (0=ready, 1=action, 2=threads, 3=ci, 4=error)",
)
@click.version_option(version=__version__)
def main(
    pr_number: int,
    repo: Optional[str],
    cache: str,
    cache_path: str,
    redis_url: Optional[str],
    output_format: str,
    verbose: bool,
    exclude_checks: tuple[str, ...],
    quiet: bool,
    semantic_codes: bool,
) -> None:
    """Check if a PR is ready to merge.

    PR_NUMBER is the pull request number to check.

    Exit codes (default - AI-friendly):
        0 - Any analyzable state (ready, action required, threads, CI)
        4 - Error fetching data

    Exit codes (with -q or --semantic-codes):
        0 - Ready to merge
        1 - Actionable comments need addressing
        2 - Unresolved threads exist
        3 - CI/CD checks failing
        4 - Error fetching data
    """
    # Get GitHub token from environment
    github_token = os.environ.get("GITHUB_TOKEN")
    if not github_token:
        click.echo("Error: GITHUB_TOKEN environment variable required", err=True)
        sys.exit(4)

    # Determine owner/repo - either from --repo option or auto-detect from git origin
    if repo is not None:
        # Parse owner/repo from --repo option
        try:
            owner, repo_name = repo.strip().split("/")
            if not owner or not repo_name:
                raise ValueError("Empty owner or repo name")
        except ValueError:
            click.echo("Error: --repo must be in owner/repo format", err=True)
            sys.exit(4)
    else:
        # Auto-detect from git origin
        detected = get_repo_from_git_origin()
        if detected is None:
            click.echo(
                "Error: Could not detect repository from git origin. "
                "Use --repo owner/repo or run from a GitHub repository.",
                err=True,
            )
            sys.exit(4)
        owner, repo_name = detected

    # Create container and analyzer, then analyze PR
    try:
        container = Container.create_default(
            github_token=github_token,
            cache_type=cache,
            cache_path=cache_path,
            redis_url=redis_url,
        )
        analyzer = PRAnalyzer(container)
        result = analyzer.analyze(owner, repo_name, pr_number, exclude_checks=set(exclude_checks))
    except Exception as e:
        # Redact sensitive data from error messages
        redacted = redact_error(e)
        if verbose:
            click.echo(f"Error: {redacted}", err=True)
        else:
            click.echo(
                "Error: Failed to analyze PR. Use --verbose for details.",
                err=True,
            )
        sys.exit(4)

    # Determine which exit code mapping to use
    use_semantic = quiet or semantic_codes
    exit_codes = SEMANTIC_EXIT_CODES if use_semantic else AI_FRIENDLY_EXIT_CODES

    # Output result in requested format (skip if quiet mode)
    if not quiet:
        if output_format == "json":
            click.echo(result.model_dump_json(indent=2))
        else:
            _print_text_output(result, verbose)

    sys.exit(exit_codes[result.status])


def _print_text_output(result: PRAnalysisResult, verbose: bool) -> None:
    """Print human-readable output.

    Displays a formatted summary of the PR analysis result suitable
    for human consumption in the terminal.

    Args:
        result: The PR analysis result to display.
        verbose: If True, show additional details like ambiguous comments.
    """
    status_icons = {
        PRStatus.READY: "OK",
        PRStatus.ACTION_REQUIRED: "!!",
        PRStatus.UNRESOLVED_THREADS: "??",
        PRStatus.CI_FAILING: "XX",
        PRStatus.ERROR: "##",
    }

    icon = status_icons.get(result.status, "??")
    click.echo(f"{icon} PR #{result.pr_number}: {result.status.value}")
    click.echo(
        f" CI: {result.ci_status.state} "
        f"({result.ci_status.passed}/{result.ci_status.total_checks} passed)"
    )
    click.echo(f" Threads: {result.threads.resolved}/{result.threads.total} resolved")

    if result.action_items:
        click.echo("\nAction required:")
        for item in result.action_items:
            click.echo(f" - {item}")

    if verbose and result.ambiguous_comments:
        click.echo("\nAmbiguous (needs investigation):")
        for comment in result.ambiguous_comments:
            # Truncate body to 80 chars for readability
            body_preview = comment.body[:80]
            if len(comment.body) > 80:
                body_preview += "..."
            click.echo(f" - [{comment.author}] {body_preview}")


if __name__ == "__main__":  # pragma: no cover
    main()
goodtogo/container.py
ADDED
@@ -0,0 +1,313 @@
"""Dependency injection container for GoodToMerge.

This module provides the Container class that manages all dependencies
for the GoodToMerge library, following the Ports & Adapters (Hexagonal)
architecture pattern.

The Container provides factory methods for creating production and test
configurations, ensuring all dependencies are properly initialized with
appropriate adapters.

Example:
    # Production usage
    container = Container.create_default(
        github_token="ghp_...",
        cache_type="sqlite",
        cache_path=".goodtogo/cache.db",
    )

    # Test usage
    container = Container.create_for_testing()
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Optional

from goodtogo.adapters.cache_memory import InMemoryCacheAdapter
from goodtogo.adapters.cache_sqlite import SqliteCacheAdapter
from goodtogo.adapters.github import GitHubAdapter
from goodtogo.adapters.time_provider import MockTimeProvider, SystemTimeProvider
from goodtogo.core.interfaces import CachePort, GitHubPort, ReviewerParser, TimeProvider
from goodtogo.core.models import ReviewerType
from goodtogo.parsers.claude import ClaudeCodeParser
from goodtogo.parsers.coderabbit import CodeRabbitParser
from goodtogo.parsers.cursor import CursorBugbotParser
from goodtogo.parsers.generic import GenericParser
from goodtogo.parsers.greptile import GreptileParser


class MockGitHubAdapter(GitHubPort):
    """Mock GitHub adapter for testing.

    This adapter raises NotImplementedError for all methods by default.
    Test code should replace individual methods with mock implementations
    as needed.

    Example:
        container = Container.create_for_testing()
        container.github.get_pr = MagicMock(return_value={"number": 123})
    """

    def __repr__(self) -> str:
        """Return string representation for debugging."""
        return "MockGitHubAdapter()"

    def __str__(self) -> str:
        """Return string representation."""
        return self.__repr__()

    def get_pr(self, owner: str, repo: str, pr_number: int) -> dict[str, Any]:
        """Fetch PR metadata.

        Raises:
            NotImplementedError: Always raised - override in tests.
        """
        raise NotImplementedError(
            "MockGitHubAdapter.get_pr() not implemented. Replace with a mock in your test."
        )

    def get_pr_comments(self, owner: str, repo: str, pr_number: int) -> list[dict[str, Any]]:
        """Fetch all PR comments.

        Raises:
            NotImplementedError: Always raised - override in tests.
        """
        raise NotImplementedError(
            "MockGitHubAdapter.get_pr_comments() not implemented. "
            "Replace with a mock in your test."
        )

    def get_pr_reviews(self, owner: str, repo: str, pr_number: int) -> list[dict[str, Any]]:
        """Fetch all PR reviews.

        Raises:
            NotImplementedError: Always raised - override in tests.
        """
        raise NotImplementedError(
            "MockGitHubAdapter.get_pr_reviews() not implemented. "
            "Replace with a mock in your test."
        )

    def get_pr_threads(self, owner: str, repo: str, pr_number: int) -> list[dict[str, Any]]:
        """Fetch all review threads.

        Raises:
            NotImplementedError: Always raised - override in tests.
        """
        raise NotImplementedError(
            "MockGitHubAdapter.get_pr_threads() not implemented. "
            "Replace with a mock in your test."
        )

    def get_ci_status(self, owner: str, repo: str, ref: str) -> dict[str, Any]:
        """Fetch CI/CD check status.

        Raises:
            NotImplementedError: Always raised - override in tests.
        """
        raise NotImplementedError(
            "MockGitHubAdapter.get_ci_status() not implemented. "
            "Replace with a mock in your test."
        )


@dataclass
class Container:
    """DI container - all dependencies injected, no global state.

    The Container holds all adapters and parsers needed by the PRAnalyzer.
    It provides factory methods for creating properly configured instances
    for production and testing scenarios.

    Attributes:
        github: GitHub API adapter implementing GitHubPort interface.
        cache: Cache adapter implementing CachePort interface.
        parsers: Dictionary mapping ReviewerType to parser implementations.
        time_provider: TimeProvider for time operations (enables deterministic testing).

    Example:
        # Create production container
        container = Container.create_default(github_token="ghp_...")
        analyzer = PRAnalyzer(container)
        result = analyzer.analyze("owner", "repo", 123)

        # Create test container with mocks
        container = Container.create_for_testing()
        container.github.get_pr = MagicMock(return_value={...})
    """

    github: GitHubPort
    cache: CachePort
    parsers: dict[ReviewerType, ReviewerParser]
    time_provider: TimeProvider

    @classmethod
    def create_default(
        cls,
        github_token: str,
        cache_type: str = "sqlite",
        cache_path: str = ".goodtogo/cache.db",
        redis_url: Optional[str] = None,
    ) -> Container:
        """Factory for standard production configuration.

        Creates a Container with production-ready adapters:
        - GitHubAdapter for real GitHub API access
        - SQLite or Redis cache based on cache_type
        - All default reviewer parsers

        Args:
            github_token: GitHub personal access token or OAuth token.
                Must have 'repo' scope for private repositories.
            cache_type: Cache backend to use. One of:
                - "sqlite": Local SQLite database (default)
                - "redis": Redis server (requires redis_url)
                - "none": No caching (NoCacheAdapter)
            cache_path: Path to SQLite database file. Only used when
                cache_type is "sqlite". Default: ".goodtogo/cache.db"
            redis_url: Redis connection URL. Required when cache_type is "redis".
                Example: "redis://localhost:6379" or "rediss://..." for TLS.

        Returns:
            Configured Container instance ready for production use.

        Raises:
            ValueError: If cache_type is "redis" but redis_url is not provided,
                or if cache_type is unknown.
        """
        time_provider = SystemTimeProvider()
        cache = _create_cache(cache_type, cache_path, redis_url, time_provider)
        return cls(
            github=GitHubAdapter(token=github_token, time_provider=time_provider),
            cache=cache,
            parsers=_create_default_parsers(),
            time_provider=time_provider,
        )

    @classmethod
    def create_for_testing(
        cls,
        github: Optional[GitHubPort] = None,
        cache: Optional[CachePort] = None,
        time_provider: Optional[TimeProvider] = None,
    ) -> Container:
        """Factory for tests - all mocks by default.

        Creates a Container suitable for testing with mock adapters:
        - MockGitHubAdapter that raises NotImplementedError (override as needed)
        - InMemoryCacheAdapter for fast, ephemeral caching
        - MockTimeProvider for deterministic time control
        - All default reviewer parsers

        Args:
            github: Optional GitHubPort implementation to use instead of mock.
                If None, uses MockGitHubAdapter.
            cache: Optional CachePort implementation to use instead of mock.
                If None, uses InMemoryCacheAdapter.
            time_provider: Optional TimeProvider implementation. If None, uses
                MockTimeProvider starting at time 0.

        Returns:
            Container instance configured for testing.

        Example:
            # Basic test setup
            container = Container.create_for_testing()

            # With custom mock
            mock_github = MagicMock(spec=GitHubPort)
            mock_github.get_pr.return_value = {"number": 123, "title": "Test"}
            container = Container.create_for_testing(github=mock_github)

            # With controlled time
            time = MockTimeProvider(start=1000.0)
            container = Container.create_for_testing(time_provider=time)
            time.advance(60)  # Advance 60 seconds instantly
        """
        resolved_time_provider = time_provider if time_provider is not None else MockTimeProvider()
        return cls(
            github=github if github is not None else MockGitHubAdapter(),
            cache=(
                cache
                if cache is not None
                else InMemoryCacheAdapter(time_provider=resolved_time_provider)
            ),
            parsers=_create_default_parsers(),
            time_provider=resolved_time_provider,
        )


def _create_cache(
    cache_type: str,
    path: str,
    redis_url: Optional[str],
    time_provider: Optional[TimeProvider] = None,
) -> CachePort:
    """Create cache adapter based on type.

    Factory function that creates the appropriate cache adapter
    based on the specified cache type.

    Args:
        cache_type: Type of cache to create. One of:
            - "sqlite": Local SQLite database
            - "redis": Redis server
            - "none": No-op cache adapter
        path: Path to SQLite database file (only used for "sqlite").
        redis_url: Redis connection URL (only used for "redis").
        time_provider: Optional TimeProvider for time operations.

    Returns:
        CachePort implementation matching the requested type.

    Raises:
        ValueError: If cache_type is "redis" but redis_url is not provided,
            or if cache_type is unknown.
    """
    if cache_type == "sqlite":
        return SqliteCacheAdapter(path, time_provider=time_provider)
    elif cache_type == "redis":
        if not redis_url:
            raise ValueError("redis_url required for redis cache")
        # Import Redis adapter only when needed to avoid requiring redis package
        from goodtogo.adapters.cache_redis import (  # type: ignore[import-untyped]
            RedisCacheAdapter,
        )

        result: CachePort = RedisCacheAdapter(redis_url)
        return result
    elif cache_type == "none":
        # No-op cache - use in-memory with immediate expiration
        # For a true no-op, we could create a NoCacheAdapter, but
        # InMemoryCacheAdapter with TTL=0 effectively accomplishes this
        return InMemoryCacheAdapter(time_provider=time_provider)
    else:
        raise ValueError(f"Unknown cache type: {cache_type}")


def _create_default_parsers() -> dict[ReviewerType, ReviewerParser]:
    """Create default parser registry.

    Creates a dictionary mapping each ReviewerType to its corresponding
    parser implementation. The GenericParser is used as a fallback for
    both HUMAN and UNKNOWN reviewer types.

    Returns:
        Dictionary with all default parsers registered:
        - CODERABBIT: CodeRabbitParser
        - GREPTILE: GreptileParser
        - CLAUDE: ClaudeCodeParser
        - CURSOR: CursorBugbotParser
        - HUMAN: GenericParser (fallback)
        - UNKNOWN: GenericParser (fallback)
    """
    return {
        ReviewerType.CODERABBIT: CodeRabbitParser(),
        ReviewerType.GREPTILE: GreptileParser(),
        ReviewerType.CLAUDE: ClaudeCodeParser(),
        ReviewerType.CURSOR: CursorBugbotParser(),
        ReviewerType.HUMAN: GenericParser(),
        ReviewerType.UNKNOWN: GenericParser(),
    }
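The docstring examples above compose into a complete test setup. The sketch below is illustrative only: the MagicMock return value is a placeholder, and a real test must make every GitHubPort method return whatever payload shapes goodtogo.core.analyzer actually expects.

# Test-setup sketch (assumption: the mocked payload below is a placeholder).
from unittest.mock import MagicMock

from goodtogo.adapters.time_provider import MockTimeProvider
from goodtogo.container import Container
from goodtogo.core.analyzer import PRAnalyzer
from goodtogo.core.interfaces import GitHubPort

clock = MockTimeProvider(start=1000.0)    # deterministic clock, no real sleeping
mock_github = MagicMock(spec=GitHubPort)  # every GitHubPort method becomes a mock
mock_github.get_pr.return_value = {"number": 123, "title": "Test"}

container = Container.create_for_testing(github=mock_github, time_provider=clock)
analyzer = PRAnalyzer(container)          # wired entirely against in-memory fakes

clock.advance(60)  # advance the fake clock by 60 seconds instantly

Because Container is a plain dataclass with no global state, swapping any single port (GitHub, cache, or clock) is just a keyword argument to create_for_testing.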
File without changes