xai-review 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of xai-review has been flagged as possibly problematic. Click here for more details.
- ai_review/clients/bitbucket/__init__.py +0 -0
- ai_review/clients/bitbucket/client.py +31 -0
- ai_review/clients/bitbucket/pr/__init__.py +0 -0
- ai_review/clients/bitbucket/pr/client.py +141 -0
- ai_review/clients/bitbucket/pr/schema/__init__.py +0 -0
- ai_review/clients/bitbucket/pr/schema/comments.py +49 -0
- ai_review/clients/bitbucket/pr/schema/files.py +30 -0
- ai_review/clients/bitbucket/pr/schema/pull_request.py +38 -0
- ai_review/clients/bitbucket/pr/types.py +44 -0
- ai_review/clients/bitbucket/tools.py +6 -0
- ai_review/clients/github/pr/client.py +66 -12
- ai_review/clients/github/pr/schema/comments.py +2 -1
- ai_review/clients/github/pr/schema/files.py +2 -1
- ai_review/clients/github/pr/schema/reviews.py +2 -1
- ai_review/clients/github/tools.py +6 -0
- ai_review/clients/gitlab/mr/client.py +35 -6
- ai_review/clients/gitlab/mr/schema/discussions.py +2 -1
- ai_review/clients/gitlab/mr/schema/notes.py +2 -1
- ai_review/clients/gitlab/tools.py +5 -0
- ai_review/libs/config/vcs/base.py +13 -1
- ai_review/libs/config/vcs/bitbucket.py +13 -0
- ai_review/libs/config/vcs/pagination.py +6 -0
- ai_review/libs/constants/vcs_provider.py +1 -0
- ai_review/libs/http/paginate.py +43 -0
- ai_review/services/vcs/bitbucket/__init__.py +0 -0
- ai_review/services/vcs/bitbucket/client.py +185 -0
- ai_review/services/vcs/factory.py +3 -0
- ai_review/tests/fixtures/clients/bitbucket.py +204 -0
- ai_review/tests/suites/clients/bitbucket/__init__.py +0 -0
- ai_review/tests/suites/clients/bitbucket/test_client.py +14 -0
- ai_review/tests/suites/clients/bitbucket/test_tools.py +31 -0
- ai_review/tests/suites/clients/github/test_tools.py +31 -0
- ai_review/tests/suites/clients/gitlab/test_tools.py +26 -0
- ai_review/tests/suites/libs/http/__init__.py +0 -0
- ai_review/tests/suites/libs/http/test_paginate.py +95 -0
- ai_review/tests/suites/services/vcs/bitbucket/__init__.py +0 -0
- ai_review/tests/suites/services/vcs/bitbucket/test_service.py +117 -0
- ai_review/tests/suites/services/vcs/test_factory.py +8 -1
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/METADATA +4 -4
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/RECORD +44 -17
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/WHEEL +0 -0
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/entry_points.txt +0 -0
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/licenses/LICENSE +0 -0
- {xai_review-0.25.0.dist-info → xai_review-0.27.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
from typing import Awaitable, Callable, TypeVar
|
|
2
|
+
|
|
3
|
+
from httpx import Response
|
|
4
|
+
from pydantic import BaseModel
|
|
5
|
+
|
|
6
|
+
from ai_review.libs.logger import get_logger
|
|
7
|
+
|
|
8
|
+
T = TypeVar("T", bound=BaseModel)

logger = get_logger("PAGINATE")


async def paginate(
    fetch_page: Callable[[int], Awaitable[Response]],
    extract_items: Callable[[Response], list[T]],
    has_next_page: Callable[[Response], bool],
    max_pages: int | None = None,
) -> list[T]:
    """Collect all items from a paginated API endpoint.

    Repeatedly calls ``fetch_page`` starting at page 1, parses each response
    with ``extract_items``, and stops once ``has_next_page`` reports that no
    further page follows.

    Args:
        fetch_page: Coroutine factory fetching a single 1-based page.
        extract_items: Parses a response into a list of schema items.
        has_next_page: Predicate deciding whether another page follows.
        max_pages: Optional safety cap on the number of pages fetched.

    Returns:
        All items accumulated across pages, in fetch order.

    Raises:
        RuntimeError: If item extraction fails on any page, or if the page
            counter exceeds ``max_pages``.
    """
    page = 1
    items: list[T] = []

    while True:
        response = await fetch_page(page)

        # Keep the try body minimal: only the parsing step is expected to raise.
        try:
            extracted = extract_items(response)
        except Exception as error:
            logger.error(f"Failed to extract items on {page=}")
            raise RuntimeError(f"Failed to extract items on {page=}") from error

        logger.debug(f"Page {page}: extracted {len(extracted)} items (total={len(items) + len(extracted)})")
        items.extend(extracted)

        if not has_next_page(response):
            logger.debug(f"Pagination finished after {page} page(s), total items={len(items)}")
            break

        page += 1
        # Explicit None check: the previous truthiness test ("if max_pages and ...")
        # silently ignored a max_pages of 0 instead of enforcing it.
        if max_pages is not None and page > max_pages:
            logger.error(f"Pagination exceeded {max_pages=}")
            raise RuntimeError(f"Pagination exceeded {max_pages=}")

    return items
|
|
File without changes
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
from ai_review.clients.bitbucket.client import get_bitbucket_http_client
|
|
2
|
+
from ai_review.clients.bitbucket.pr.schema.comments import (
|
|
3
|
+
BitbucketCommentInlineSchema,
|
|
4
|
+
BitbucketCommentContentSchema,
|
|
5
|
+
BitbucketCreatePRCommentRequestSchema,
|
|
6
|
+
)
|
|
7
|
+
from ai_review.config import settings
|
|
8
|
+
from ai_review.libs.logger import get_logger
|
|
9
|
+
from ai_review.services.vcs.types import (
|
|
10
|
+
VCSClientProtocol,
|
|
11
|
+
UserSchema,
|
|
12
|
+
BranchRefSchema,
|
|
13
|
+
ReviewInfoSchema,
|
|
14
|
+
ReviewCommentSchema,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
logger = get_logger("BITBUCKET_VCS_CLIENT")


class BitbucketVCSClient(VCSClientProtocol):
    """Bitbucket implementation of the VCS client protocol.

    Maps Bitbucket pull-request API payloads onto the provider-agnostic
    review schemas used by the rest of the service layer. Read operations
    swallow errors and return empty results; write operations re-raise.
    """

    def __init__(self):
        # PR coordinates and the HTTP client are resolved once from settings.
        self.http_client = get_bitbucket_http_client()
        self.workspace = settings.vcs.pipeline.workspace
        self.repo_slug = settings.vcs.pipeline.repo_slug
        self.pull_request_id = settings.vcs.pipeline.pull_request_id

    async def get_review_info(self) -> ReviewInfoSchema:
        """Fetch PR metadata and its changed files as a ReviewInfoSchema.

        Returns a default-constructed ``ReviewInfoSchema`` when any request
        or mapping step fails.
        """
        try:
            pr = await self.http_client.pr.get_pull_request(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
            )
            files = await self.http_client.pr.get_files(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
            )

            logger.info(f"Fetched PR info for {self.workspace}/{self.repo_slug}#{self.pull_request_id}")

            return ReviewInfoSchema(
                id=pr.id,
                title=pr.title,
                description=pr.description or "",
                author=UserSchema(
                    id=pr.author.uuid,
                    name=pr.author.display_name,
                    username=pr.author.nickname,
                ),
                labels=[],  # not populated for Bitbucket
                base_sha=pr.destination.commit.hash,
                head_sha=pr.source.commit.hash,
                # NOTE(review): participants are mapped to assignees here —
                # confirm this matches the intended semantics.
                assignees=[
                    UserSchema(
                        id=user.uuid,
                        name=user.display_name,
                        username=user.nickname,
                    )
                    for user in pr.participants
                ],
                reviewers=[
                    UserSchema(
                        id=user.uuid,
                        name=user.display_name,
                        username=user.nickname,
                    )
                    for user in pr.reviewers
                ],
                source_branch=BranchRefSchema(
                    ref=pr.source.branch.name,
                    sha=pr.source.commit.hash,
                ),
                target_branch=BranchRefSchema(
                    ref=pr.destination.branch.name,
                    sha=pr.destination.commit.hash,
                ),
                changed_files=[
                    # Prefer the post-change path; fall back to the old path
                    # when there is no "new" side.
                    file.new.path if file.new else file.old.path
                    for file in files.values
                ],
            )
        except Exception as error:
            logger.exception(
                f"Failed to fetch PR info {self.workspace}/{self.repo_slug}#{self.pull_request_id}: {error}"
            )
            return ReviewInfoSchema()

    async def get_general_comments(self) -> list[ReviewCommentSchema]:
        """Return PR-level comments (those without an inline anchor).

        Returns an empty list on any failure.
        """
        try:
            response = await self.http_client.pr.get_comments(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
            )
            logger.info(f"Fetched general comments for {self.workspace}/{self.repo_slug}#{self.pull_request_id}")

            return [
                ReviewCommentSchema(id=comment.id, body=comment.content.raw)
                for comment in response.values
                if comment.inline is None
            ]
        except Exception as error:
            logger.exception(
                f"Failed to fetch general comments for "
                f"{self.workspace}/{self.repo_slug}#{self.pull_request_id}: {error}"
            )
            return []

    async def get_inline_comments(self) -> list[ReviewCommentSchema]:
        """Return comments anchored to a file/line in the diff.

        Returns an empty list on any failure.
        """
        try:
            response = await self.http_client.pr.get_comments(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
            )
            logger.info(f"Fetched inline comments for {self.workspace}/{self.repo_slug}#{self.pull_request_id}")

            return [
                ReviewCommentSchema(
                    id=comment.id,
                    body=comment.content.raw,
                    file=comment.inline.path,
                    line=comment.inline.to_line,
                )
                for comment in response.values
                if comment.inline is not None
            ]
        except Exception as error:
            logger.exception(
                f"Failed to fetch inline comments for "
                f"{self.workspace}/{self.repo_slug}#{self.pull_request_id}: {error}"
            )
            return []

    async def create_general_comment(self, message: str) -> None:
        """Post a PR-level comment. Re-raises on failure."""
        try:
            logger.info(
                f"Posting general comment to PR {self.workspace}/{self.repo_slug}#{self.pull_request_id}: {message}"
            )
            request = BitbucketCreatePRCommentRequestSchema(
                content=BitbucketCommentContentSchema(raw=message)
            )
            await self.http_client.pr.create_comment(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
                request=request,
            )
            logger.info(
                f"Created general comment in PR {self.workspace}/{self.repo_slug}#{self.pull_request_id}"
            )
        except Exception as error:
            logger.exception(
                f"Failed to create general comment in PR "
                f"{self.workspace}/{self.repo_slug}#{self.pull_request_id}: {error}"
            )
            raise

    async def create_inline_comment(self, file: str, line: int, message: str) -> None:
        """Post a comment anchored at ``file``:``line``. Re-raises on failure."""
        try:
            logger.info(
                f"Posting inline comment in {self.workspace}/{self.repo_slug}#{self.pull_request_id} "
                f"at {file}:{line}: {message}"
            )
            request = BitbucketCreatePRCommentRequestSchema(
                content=BitbucketCommentContentSchema(raw=message),
                inline=BitbucketCommentInlineSchema(path=file, to_line=line),
            )
            await self.http_client.pr.create_comment(
                workspace=self.workspace,
                repo_slug=self.repo_slug,
                pull_request_id=self.pull_request_id,
                request=request,
            )
            logger.info(
                f"Created inline comment in {self.workspace}/{self.repo_slug}#{self.pull_request_id} "
                f"at {file}:{line}"
            )
        except Exception as error:
            logger.exception(
                f"Failed to create inline comment in {self.workspace}/{self.repo_slug}#{self.pull_request_id} "
                f"at {file}:{line}: {error}"
            )
            raise
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
from ai_review.config import settings
|
|
2
2
|
from ai_review.libs.constants.vcs_provider import VCSProvider
|
|
3
|
+
from ai_review.services.vcs.bitbucket.client import BitbucketVCSClient
|
|
3
4
|
from ai_review.services.vcs.github.client import GitHubVCSClient
|
|
4
5
|
from ai_review.services.vcs.gitlab.client import GitLabVCSClient
|
|
5
6
|
from ai_review.services.vcs.types import VCSClientProtocol
|
|
@@ -11,5 +12,7 @@ def get_vcs_client() -> VCSClientProtocol:
|
|
|
11
12
|
return GitLabVCSClient()
|
|
12
13
|
case VCSProvider.GITHUB:
|
|
13
14
|
return GitHubVCSClient()
|
|
15
|
+
case VCSProvider.BITBUCKET:
|
|
16
|
+
return BitbucketVCSClient()
|
|
14
17
|
case _:
|
|
15
18
|
raise ValueError(f"Unsupported VCS provider: {settings.vcs.provider}")
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from pydantic import HttpUrl, SecretStr
|
|
3
|
+
|
|
4
|
+
from ai_review.clients.bitbucket.pr.schema.comments import (
|
|
5
|
+
BitbucketPRCommentSchema,
|
|
6
|
+
BitbucketCommentContentSchema,
|
|
7
|
+
BitbucketCommentInlineSchema,
|
|
8
|
+
BitbucketGetPRCommentsResponseSchema,
|
|
9
|
+
BitbucketCreatePRCommentRequestSchema,
|
|
10
|
+
BitbucketCreatePRCommentResponseSchema,
|
|
11
|
+
)
|
|
12
|
+
from ai_review.clients.bitbucket.pr.schema.files import (
|
|
13
|
+
BitbucketGetPRFilesResponseSchema,
|
|
14
|
+
BitbucketPRFileSchema,
|
|
15
|
+
BitbucketPRFilePathSchema,
|
|
16
|
+
)
|
|
17
|
+
from ai_review.clients.bitbucket.pr.schema.pull_request import (
|
|
18
|
+
BitbucketUserSchema,
|
|
19
|
+
BitbucketBranchSchema,
|
|
20
|
+
BitbucketCommitSchema,
|
|
21
|
+
BitbucketRepositorySchema,
|
|
22
|
+
BitbucketPRLocationSchema,
|
|
23
|
+
BitbucketGetPRResponseSchema,
|
|
24
|
+
)
|
|
25
|
+
from ai_review.clients.bitbucket.pr.types import BitbucketPullRequestsHTTPClientProtocol
|
|
26
|
+
from ai_review.config import settings
|
|
27
|
+
from ai_review.libs.config.vcs.base import BitbucketVCSConfig
|
|
28
|
+
from ai_review.libs.config.vcs.bitbucket import BitbucketPipelineConfig, BitbucketHTTPClientConfig
|
|
29
|
+
from ai_review.libs.constants.vcs_provider import VCSProvider
|
|
30
|
+
from ai_review.services.vcs.bitbucket.client import BitbucketVCSClient
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class FakeBitbucketPullRequestsHTTPClient(BitbucketPullRequestsHTTPClientProtocol):
    """In-memory stand-in for the Bitbucket PR HTTP client.

    Records every call as ``(method_name, kwargs)`` in ``self.calls`` and
    returns fixed canned schema objects, so service-layer tests can assert
    both the mapping logic and the exact calls made.
    """

    def __init__(self):
        # Call log: one (method name, kwargs dict) tuple per invocation.
        self.calls: list[tuple[str, dict]] = []

    async def get_pull_request(
        self,
        workspace: str,
        repo_slug: str,
        pull_request_id: str
    ) -> BitbucketGetPRResponseSchema:
        """Record the call and return a canned open PR."""
        self.calls.append(
            (
                "get_pull_request",
                {"workspace": workspace, "repo_slug": repo_slug, "pull_request_id": pull_request_id}
            )
        )
        return BitbucketGetPRResponseSchema(
            id=1,
            title="Fake Bitbucket PR",
            description="This is a fake PR for testing",
            state="OPEN",
            author=BitbucketUserSchema(uuid="u1", display_name="Tester", nickname="tester"),
            source=BitbucketPRLocationSchema(
                commit=BitbucketCommitSchema(hash="def456"),
                branch=BitbucketBranchSchema(name="feature/test"),
                repository=BitbucketRepositorySchema(uuid="r1", full_name="workspace/repo"),
            ),
            destination=BitbucketPRLocationSchema(
                commit=BitbucketCommitSchema(hash="abc123"),
                branch=BitbucketBranchSchema(name="main"),
                repository=BitbucketRepositorySchema(uuid="r1", full_name="workspace/repo"),
            ),
            reviewers=[BitbucketUserSchema(uuid="u2", display_name="Reviewer", nickname="reviewer")],
            participants=[BitbucketUserSchema(uuid="u3", display_name="Participant", nickname="participant")],
        )

    async def get_files(
        self,
        workspace: str,
        repo_slug: str,
        pull_request_id: str
    ) -> BitbucketGetPRFilesResponseSchema:
        """Record the call and return a single canned page of two diffstat entries."""
        self.calls.append(
            (
                "get_files",
                {"workspace": workspace, "repo_slug": repo_slug, "pull_request_id": pull_request_id}
            )
        )
        return BitbucketGetPRFilesResponseSchema(
            size=2,
            page=1,
            pagelen=100,
            next=None,
            values=[
                BitbucketPRFileSchema(
                    new=BitbucketPRFilePathSchema(path="app/main.py"),
                    old=None,
                    status="modified",
                    lines_added=10,
                    lines_removed=2,
                ),
                BitbucketPRFileSchema(
                    new=BitbucketPRFilePathSchema(path="utils/helper.py"),
                    old=None,
                    status="added",
                    lines_added=5,
                    lines_removed=0,
                ),
            ],
        )

    async def get_comments(
        self,
        workspace: str,
        repo_slug: str,
        pull_request_id: str
    ) -> BitbucketGetPRCommentsResponseSchema:
        """Record the call and return one general plus one inline comment."""
        self.calls.append(
            (
                "get_comments",
                {"workspace": workspace, "repo_slug": repo_slug, "pull_request_id": pull_request_id}
            )
        )
        return BitbucketGetPRCommentsResponseSchema(
            size=2,
            page=1,
            next=None,
            values=[
                # inline=None marks a PR-level (general) comment.
                BitbucketPRCommentSchema(
                    id=1,
                    inline=None,
                    content=BitbucketCommentContentSchema(raw="General comment"),
                ),
                BitbucketPRCommentSchema(
                    id=2,
                    inline=BitbucketCommentInlineSchema(path="file.py", to_line=5),
                    content=BitbucketCommentContentSchema(raw="Inline comment"),
                ),
            ],
            pagelen=100,
        )

    async def create_comment(
        self,
        workspace: str,
        repo_slug: str,
        pull_request_id: str,
        request: BitbucketCreatePRCommentRequestSchema
    ) -> BitbucketCreatePRCommentResponseSchema:
        """Record the call (flattening the request body) and echo it back."""
        self.calls.append(
            (
                "create_comment",
                {
                    "workspace": workspace,
                    "repo_slug": repo_slug,
                    "pull_request_id": pull_request_id,
                    # Flatten the request so assertions can inspect the payload.
                    **request.model_dump(by_alias=True)
                }
            )
        )
        return BitbucketCreatePRCommentResponseSchema(
            id=10,
            content=request.content,
            inline=request.inline,
        )
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
class FakeBitbucketHTTPClient:
    """Minimal stand-in for the top-level Bitbucket HTTP client.

    Exposes only the ``pr`` sub-client attribute that BitbucketVCSClient uses.
    """

    def __init__(self, pull_requests_client: BitbucketPullRequestsHTTPClientProtocol):
        # pr: pull-requests sub-client, matching the real client's attribute name.
        self.pr = pull_requests_client
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
@pytest.fixture
def fake_bitbucket_pull_requests_http_client() -> FakeBitbucketPullRequestsHTTPClient:
    """Fresh fake PR sub-client (empty call log) per test."""
    return FakeBitbucketPullRequestsHTTPClient()
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
@pytest.fixture
def fake_bitbucket_http_client(
    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient
) -> FakeBitbucketHTTPClient:
    """Top-level fake client wrapping the fake PR sub-client."""
    return FakeBitbucketHTTPClient(pull_requests_client=fake_bitbucket_pull_requests_http_client)
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
@pytest.fixture
def bitbucket_vcs_client(
    monkeypatch: pytest.MonkeyPatch,
    fake_bitbucket_http_client: FakeBitbucketHTTPClient
) -> BitbucketVCSClient:
    """BitbucketVCSClient wired to the fake HTTP client.

    Patches the factory function that BitbucketVCSClient.__init__ calls,
    so no real HTTP client is ever constructed.
    """
    monkeypatch.setattr(
        "ai_review.services.vcs.bitbucket.client.get_bitbucket_http_client",
        lambda: fake_bitbucket_http_client,
    )
    return BitbucketVCSClient()
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
@pytest.fixture
def bitbucket_http_client_config(monkeypatch: pytest.MonkeyPatch):
    """Point global ``settings.vcs`` at a fake Bitbucket configuration.

    monkeypatch restores the original settings after the test.
    """
    fake_config = BitbucketVCSConfig(
        provider=VCSProvider.BITBUCKET,
        pipeline=BitbucketPipelineConfig(
            workspace="workspace",
            repo_slug="repo",
            pull_request_id="123",
        ),
        http_client=BitbucketHTTPClientConfig(
            timeout=10,
            api_url=HttpUrl("https://api.bitbucket.org/2.0"),
            api_token=SecretStr("fake-token"),
        )
    )
    monkeypatch.setattr(settings, "vcs", fake_config)
|
|
File without changes
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from httpx import AsyncClient
|
|
3
|
+
|
|
4
|
+
from ai_review.clients.bitbucket.client import get_bitbucket_http_client, BitbucketHTTPClient
|
|
5
|
+
from ai_review.clients.bitbucket.pr.client import BitbucketPullRequestsHTTPClient
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@pytest.mark.usefixtures("bitbucket_http_client_config")
def test_get_bitbucket_http_client_builds_ok():
    """The factory assembles the full client hierarchy from patched settings."""
    bitbucket_http_client = get_bitbucket_http_client()

    assert isinstance(bitbucket_http_client, BitbucketHTTPClient)
    assert isinstance(bitbucket_http_client.pr, BitbucketPullRequestsHTTPClient)
    assert isinstance(bitbucket_http_client.pr.client, AsyncClient)
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
from httpx import Response, Request
|
|
2
|
+
|
|
3
|
+
from ai_review.clients.bitbucket.tools import bitbucket_has_next_page
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def make_response(data: dict) -> Response:
    """Wrap a JSON payload in a 200 OK httpx response."""
    fake_request = Request("GET", "http://bitbucket.test")
    return Response(status_code=200, request=fake_request, json=data)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def test_bitbucket_has_next_page_true():
    """A populated 'next' URL means another page exists."""
    response = make_response({"next": "https://api.bitbucket.org/2.0/repositories/test/repo?page=2"})
    assert bitbucket_has_next_page(response) is True
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def test_bitbucket_has_next_page_false_none():
    """An explicit null 'next' field marks the final page."""
    response = make_response({"next": None})
    assert bitbucket_has_next_page(response) is False
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def test_bitbucket_has_next_page_false_missing():
    """A payload without a 'next' key marks the final page."""
    response = make_response({})
    assert bitbucket_has_next_page(response) is False
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def test_bitbucket_has_next_page_false_empty_string():
    """An empty-string 'next' value is treated as no further pages."""
    response = make_response({"next": ""})
    assert bitbucket_has_next_page(response) is False
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
from httpx import Response, Request
|
|
2
|
+
|
|
3
|
+
from ai_review.clients.github.tools import github_has_next_page
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def make_response(headers: dict) -> Response:
    """Build a 200 OK httpx response carrying the given headers."""
    dummy_request = Request("GET", "http://test")
    return Response(status_code=200, headers=headers, request=dummy_request)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def test_github_has_next_page_true():
    """A Link header advertising rel="next" signals more pages."""
    link = '<https://api.github.com/resource?page=2>; rel="next", <https://api.github.com/resource?page=5>; rel="last"'
    resp = make_response({"Link": link})
    assert github_has_next_page(resp) is True
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def test_github_has_next_page_false_no_next():
    """A Link header with only rel="last" means no further pages."""
    link = '<https://api.github.com/resource?page=5>; rel="last"'
    resp = make_response({"Link": link})
    assert github_has_next_page(resp) is False
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def test_github_has_next_page_false_no_header():
    """A response without any Link header means no further pages."""
    response = make_response({})
    assert github_has_next_page(response) is False
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from httpx import Response, Request
|
|
2
|
+
|
|
3
|
+
from ai_review.clients.gitlab.tools import gitlab_has_next_page
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def make_response(headers: dict) -> Response:
    """Build a 200 OK httpx response with the given headers attached."""
    dummy_request = Request("GET", "http://gitlab.test")
    return Response(status_code=200, headers=headers, request=dummy_request)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def test_gitlab_has_next_page_true():
    """A non-empty X-Next-Page header signals more pages."""
    response = make_response({"X-Next-Page": "2"})
    assert gitlab_has_next_page(response) is True
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def test_gitlab_has_next_page_false_empty():
    """An empty X-Next-Page header marks the final page."""
    response = make_response({"X-Next-Page": ""})
    assert gitlab_has_next_page(response) is False
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def test_gitlab_has_next_page_false_missing():
    """A response without the X-Next-Page header marks the final page."""
    response = make_response({})
    assert gitlab_has_next_page(response) is False
|
|
File without changes
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from httpx import Response, Request
|
|
3
|
+
from pydantic import BaseModel
|
|
4
|
+
|
|
5
|
+
from ai_review.libs.http.paginate import paginate
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class DummySchema(BaseModel):
    """Minimal pydantic model standing in for real paginated item schemas."""

    # The integer carried by each fake page item.
    value: int
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def make_response(data: dict) -> Response:
    """Wrap a JSON payload in a 200 OK httpx response."""
    dummy_request = Request("GET", "http://test")
    return Response(status_code=200, request=dummy_request, json=data)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@pytest.mark.asyncio
async def test_single_page():
    """One page with no successor returns exactly that page's items."""

    async def fetch_page(_: int) -> Response:
        return make_response({"items": [1, 2, 3]})

    def extract_items(response: Response) -> list[DummySchema]:
        return [DummySchema(value=item) for item in response.json()["items"]]

    collected = await paginate(fetch_page, extract_items, lambda _: False)

    assert len(collected) == 3
    assert [schema.value for schema in collected] == [1, 2, 3]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@pytest.mark.asyncio
async def test_multiple_pages():
    """Pages are fetched in order until has_next_page reports the last one."""

    async def fetch_page(page: int) -> Response:
        # Each fake page carries its own page number as its single item.
        return make_response({"items": [page]})

    def extract_items(response: Response) -> list[DummySchema]:
        return [DummySchema(value=entry) for entry in response.json()["items"]]

    def has_next_page(response: Response) -> bool:
        # Stop after page 3.
        return response.json()["items"][0] < 3

    collected = await paginate(fetch_page, extract_items, has_next_page)

    assert [schema.value for schema in collected] == [1, 2, 3]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@pytest.mark.asyncio
async def test_extract_items_error():
    """A failing extractor is wrapped into a RuntimeError."""

    async def fetch_page(_: int) -> Response:
        return make_response({"items": [1]})

    def broken_extractor(_: Response):
        raise ValueError("bad json")

    with pytest.raises(RuntimeError) as exc_info:
        await paginate(fetch_page, broken_extractor, lambda _: False)

    assert "Failed to extract items" in str(exc_info.value)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@pytest.mark.asyncio
async def test_max_pages_exceeded():
    """Endless pagination trips the max_pages guard with a RuntimeError."""

    async def fetch_page(page: int) -> Response:
        return make_response({"items": [page]})

    def extract_items(response: Response) -> list[DummySchema]:
        return [DummySchema(value=entry) for entry in response.json()["items"]]

    with pytest.raises(RuntimeError) as exc_info:
        await paginate(fetch_page, extract_items, lambda _: True, max_pages=2)

    assert "Pagination exceeded" in str(exc_info.value)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@pytest.mark.asyncio
async def test_empty_items():
    """A single empty page yields an empty result list."""

    async def fetch_page(_: int) -> Response:
        return make_response({"items": []})

    outcome = await paginate(fetch_page, lambda _: [], lambda _: False)

    assert outcome == []
|
|
File without changes
|