xai-review 0.24.0__py3-none-any.whl → 0.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of xai-review might be problematic.
- ai_review/clients/bitbucket/__init__.py +0 -0
- ai_review/clients/bitbucket/client.py +31 -0
- ai_review/clients/bitbucket/pr/__init__.py +0 -0
- ai_review/clients/bitbucket/pr/client.py +104 -0
- ai_review/clients/bitbucket/pr/schema/__init__.py +0 -0
- ai_review/clients/bitbucket/pr/schema/comments.py +44 -0
- ai_review/clients/bitbucket/pr/schema/files.py +25 -0
- ai_review/clients/bitbucket/pr/schema/pull_request.py +38 -0
- ai_review/clients/bitbucket/pr/types.py +44 -0
- ai_review/clients/claude/client.py +2 -1
- ai_review/clients/claude/types.py +8 -0
- ai_review/clients/gemini/client.py +2 -1
- ai_review/clients/gemini/types.py +8 -0
- ai_review/clients/ollama/__init__.py +0 -0
- ai_review/clients/ollama/client.py +41 -0
- ai_review/clients/ollama/schema.py +47 -0
- ai_review/clients/ollama/types.py +8 -0
- ai_review/clients/openai/client.py +2 -1
- ai_review/clients/openai/types.py +8 -0
- ai_review/libs/config/http.py +4 -1
- ai_review/libs/config/llm/base.py +8 -1
- ai_review/libs/config/llm/claude.py +4 -7
- ai_review/libs/config/llm/gemini.py +4 -7
- ai_review/libs/config/llm/meta.py +7 -0
- ai_review/libs/config/llm/ollama.py +14 -0
- ai_review/libs/config/llm/openai.py +4 -7
- ai_review/libs/config/vcs/base.py +11 -1
- ai_review/libs/config/vcs/bitbucket.py +13 -0
- ai_review/libs/config/vcs/github.py +2 -2
- ai_review/libs/config/vcs/gitlab.py +2 -2
- ai_review/libs/constants/llm_provider.py +1 -0
- ai_review/libs/constants/vcs_provider.py +1 -0
- ai_review/services/llm/factory.py +3 -0
- ai_review/services/llm/ollama/__init__.py +0 -0
- ai_review/services/llm/ollama/client.py +34 -0
- ai_review/services/vcs/bitbucket/__init__.py +0 -0
- ai_review/services/vcs/bitbucket/client.py +185 -0
- ai_review/services/vcs/factory.py +3 -0
- ai_review/tests/fixtures/clients/bitbucket.py +204 -0
- ai_review/tests/fixtures/clients/claude.py +45 -0
- ai_review/tests/fixtures/clients/gemini.py +52 -0
- ai_review/tests/fixtures/clients/ollama.py +65 -0
- ai_review/tests/fixtures/clients/openai.py +48 -0
- ai_review/tests/suites/clients/ollama/__init__.py +0 -0
- ai_review/tests/suites/clients/ollama/test_client.py +12 -0
- ai_review/tests/suites/clients/ollama/test_schema.py +65 -0
- ai_review/tests/suites/services/llm/claude/__init__.py +0 -0
- ai_review/tests/suites/services/llm/claude/test_client.py +22 -0
- ai_review/tests/suites/services/llm/gemini/__init__.py +0 -0
- ai_review/tests/suites/services/llm/gemini/test_client.py +22 -0
- ai_review/tests/suites/services/llm/ollama/__init__.py +0 -0
- ai_review/tests/suites/services/llm/ollama/test_client.py +22 -0
- ai_review/tests/suites/services/llm/openai/__init__.py +0 -0
- ai_review/tests/suites/services/llm/openai/test_client.py +22 -0
- ai_review/tests/suites/services/llm/test_factory.py +8 -1
- ai_review/tests/suites/services/vcs/bitbucket/__init__.py +0 -0
- ai_review/tests/suites/services/vcs/bitbucket/test_service.py +117 -0
- ai_review/tests/suites/services/vcs/test_factory.py +8 -1
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/METADATA +10 -6
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/RECORD +64 -26
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/WHEEL +0 -0
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/entry_points.txt +0 -0
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/licenses/LICENSE +0 -0
- {xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/top_level.txt +0 -0
ai_review/tests/fixtures/clients/openai.py
@@ -1,10 +1,58 @@
+from typing import Any
+
 import pytest
 from pydantic import HttpUrl, SecretStr
 
+from ai_review.clients.openai.schema import (
+    OpenAIUsageSchema,
+    OpenAIChoiceSchema,
+    OpenAIMessageSchema,
+    OpenAIChatRequestSchema,
+    OpenAIChatResponseSchema,
+)
+from ai_review.clients.openai.types import OpenAIHTTPClientProtocol
 from ai_review.config import settings
 from ai_review.libs.config.llm.base import OpenAILLMConfig
 from ai_review.libs.config.llm.openai import OpenAIMetaConfig, OpenAIHTTPClientConfig
 from ai_review.libs.constants.llm_provider import LLMProvider
+from ai_review.services.llm.openai.client import OpenAILLMClient
+
+
+class FakeOpenAIHTTPClient(OpenAIHTTPClientProtocol):
+    def __init__(self, responses: dict[str, Any] | None = None) -> None:
+        self.calls: list[tuple[str, dict]] = []
+        self.responses = responses or {}
+
+    async def chat(self, request: OpenAIChatRequestSchema) -> OpenAIChatResponseSchema:
+        self.calls.append(("chat", {"request": request}))
+        return self.responses.get(
+            "chat",
+            OpenAIChatResponseSchema(
+                usage=OpenAIUsageSchema(total_tokens=12, prompt_tokens=5, completion_tokens=7),
+                choices=[
+                    OpenAIChoiceSchema(
+                        message=OpenAIMessageSchema(role="assistant", content="FAKE_OPENAI_RESPONSE")
+                    )
+                ],
+            ),
+        )
+
+
+@pytest.fixture
+def fake_openai_http_client():
+    return FakeOpenAIHTTPClient()
+
+
+@pytest.fixture
+def openai_llm_client(
+    monkeypatch: pytest.MonkeyPatch,
+    fake_openai_http_client: FakeOpenAIHTTPClient
+) -> OpenAILLMClient:
+    monkeypatch.setattr(
+        "ai_review.services.llm.openai.client.get_openai_http_client",
+        lambda: fake_openai_http_client,
+    )
+    return OpenAILLMClient()
 
 
 @pytest.fixture
File without changes
ai_review/tests/suites/clients/ollama/test_client.py
@@ -0,0 +1,12 @@
+import pytest
+from httpx import AsyncClient
+
+from ai_review.clients.ollama.client import get_ollama_http_client, OllamaHTTPClient
+
+
+@pytest.mark.usefixtures('ollama_http_client_config')
+def test_get_ollama_http_client_builds_ok():
+    ollama_http_client = get_ollama_http_client()
+
+    assert isinstance(ollama_http_client, OllamaHTTPClient)
+    assert isinstance(ollama_http_client.client, AsyncClient)
ai_review/tests/suites/clients/ollama/test_schema.py
@@ -0,0 +1,65 @@
+from ai_review.clients.ollama.schema import (
+    OllamaMessageSchema,
+    OllamaOptionsSchema,
+    OllamaChatRequestSchema,
+    OllamaUsageSchema,
+    OllamaChatResponseSchema,
+)
+
+
+# ---------- OllamaUsageSchema ----------
+
+def test_usage_total_tokens_sum_ok():
+    usage = OllamaUsageSchema(prompt_tokens=5, completion_tokens=7)
+    assert usage.total_tokens == 12
+
+
+def test_usage_total_tokens_none_if_missing():
+    usage = OllamaUsageSchema(prompt_tokens=3)
+    assert usage.total_tokens is None
+
+
+# ---------- OllamaChatResponseSchema ----------
+
+def test_first_text_returns_text():
+    resp = OllamaChatResponseSchema(
+        model="llama2",
+        message=OllamaMessageSchema(role="assistant", content=" hello ollama "),
+        usage=OllamaUsageSchema(prompt_tokens=2, completion_tokens=3),
+    )
+    assert resp.first_text == "hello ollama"
+
+
+def test_first_text_empty_if_content_empty():
+    resp = OllamaChatResponseSchema(
+        model="llama2",
+        message=OllamaMessageSchema(role="assistant", content=" "),
+        usage=OllamaUsageSchema(prompt_tokens=1, completion_tokens=1),
+    )
+    assert resp.first_text == ""
+
+
+# ---------- OllamaChatRequestSchema ----------
+
+def test_chat_request_schema_builds_ok():
+    msg = OllamaMessageSchema(role="user", content="hi ollama")
+    opts = OllamaOptionsSchema(
+        stop=["stop1", "stop2"],
+        seed=123,
+        top_p=0.9,
+        temperature=0.7,
+        num_predict=256,
+        repeat_penalty=1.1,
+    )
+    req = OllamaChatRequestSchema(
+        model="llama2",
+        stream=False,
+        options=opts,
+        messages=[msg],
+    )
+
+    assert req.model == "llama2"
+    assert req.options.temperature == 0.7
+    assert req.options.num_predict == 256
+    assert req.options.stop == ["stop1", "stop2"]
+    assert req.messages[0].content == "hi ollama"
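
Note: the fields exercised above (model, stream, options, messages) line up with the request body of Ollama's /api/chat endpoint. The sketch below is illustrative only; it assumes the new HTTP client simply serializes OllamaChatRequestSchema into that JSON shape and that a local runtime is listening at the default address, neither of which is shown in this diff.

# Illustrative sketch (not taken from the package): the kind of payload the
# schemas above appear to model, posted to Ollama's /api/chat endpoint.
import httpx

payload = {
    "model": "llama2",
    "stream": False,
    "options": {"temperature": 0.7, "num_predict": 256, "stop": ["stop1", "stop2"]},
    "messages": [{"role": "user", "content": "hi ollama"}],
}

response = httpx.post("http://localhost:11434/api/chat", json=payload, timeout=60)
# Non-streaming responses carry the assistant reply under message.content.
print(response.json()["message"]["content"])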
File without changes
ai_review/tests/suites/services/llm/claude/test_client.py
@@ -0,0 +1,22 @@
+import pytest
+
+from ai_review.services.llm.claude.client import ClaudeLLMClient
+from ai_review.services.llm.types import ChatResultSchema
+from ai_review.tests.fixtures.clients.claude import FakeClaudeHTTPClient
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("claude_http_client_config")
+async def test_claude_llm_chat(
+    claude_llm_client: ClaudeLLMClient,
+    fake_claude_http_client: FakeClaudeHTTPClient
+):
+    result = await claude_llm_client.chat("prompt", "prompt_system")
+
+    assert isinstance(result, ChatResultSchema)
+    assert result.text == "FAKE_CLAUDE_RESPONSE"
+    assert result.total_tokens == 12
+    assert result.prompt_tokens == 5
+    assert result.completion_tokens == 7
+
+    assert fake_claude_http_client.calls[0][0] == "chat"
File without changes
ai_review/tests/suites/services/llm/gemini/test_client.py
@@ -0,0 +1,22 @@
+import pytest
+
+from ai_review.services.llm.gemini.client import GeminiLLMClient
+from ai_review.services.llm.types import ChatResultSchema
+from ai_review.tests.fixtures.clients.gemini import FakeGeminiHTTPClient
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("gemini_http_client_config")
+async def test_gemini_llm_chat(
+    gemini_llm_client: GeminiLLMClient,
+    fake_gemini_http_client: FakeGeminiHTTPClient
+):
+    result = await gemini_llm_client.chat("prompt", "prompt_system")
+
+    assert isinstance(result, ChatResultSchema)
+    assert result.text == "FAKE_GEMINI_RESPONSE"
+    assert result.total_tokens == 10
+    assert result.prompt_tokens == 2
+    assert result.completion_tokens is None
+
+    assert fake_gemini_http_client.calls[0][0] == "chat"
File without changes
ai_review/tests/suites/services/llm/ollama/test_client.py
@@ -0,0 +1,22 @@
+import pytest
+
+from ai_review.services.llm.ollama.client import OllamaLLMClient
+from ai_review.services.llm.types import ChatResultSchema
+from ai_review.tests.fixtures.clients.ollama import FakeOllamaHTTPClient
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("ollama_http_client_config")
+async def test_ollama_llm_chat(
+    ollama_llm_client: OllamaLLMClient,
+    fake_ollama_http_client: FakeOllamaHTTPClient
+):
+    result = await ollama_llm_client.chat("prompt", "prompt_system")
+
+    assert isinstance(result, ChatResultSchema)
+    assert result.text == "FAKE_OLLAMA_RESPONSE"
+    assert result.total_tokens == 8
+    assert result.prompt_tokens == 3
+    assert result.completion_tokens == 5
+
+    assert fake_ollama_http_client.calls[0][0] == "chat"
File without changes
ai_review/tests/suites/services/llm/openai/test_client.py
@@ -0,0 +1,22 @@
+import pytest
+
+from ai_review.services.llm.openai.client import OpenAILLMClient
+from ai_review.services.llm.types import ChatResultSchema
+from ai_review.tests.fixtures.clients.openai import FakeOpenAIHTTPClient
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("openai_http_client_config")
+async def test_openai_llm_chat(
+    openai_llm_client: OpenAILLMClient,
+    fake_openai_http_client: FakeOpenAIHTTPClient
+):
+    result = await openai_llm_client.chat("prompt", "prompt_system")
+
+    assert isinstance(result, ChatResultSchema)
+    assert result.text == "FAKE_OPENAI_RESPONSE"
+    assert result.total_tokens == 12
+    assert result.prompt_tokens == 5
+    assert result.completion_tokens == 7
+
+    assert fake_openai_http_client.calls[0][0] == "chat"
ai_review/tests/suites/services/llm/test_factory.py
@@ -3,6 +3,7 @@ import pytest
 from ai_review.services.llm.claude.client import ClaudeLLMClient
 from ai_review.services.llm.factory import get_llm_client
 from ai_review.services.llm.gemini.client import GeminiLLMClient
+from ai_review.services.llm.ollama.client import OllamaLLMClient
 from ai_review.services.llm.openai.client import OpenAILLMClient
 
 
@@ -24,7 +25,13 @@ def test_get_llm_client_returns_claude(monkeypatch: pytest.MonkeyPatch):
     assert isinstance(client, ClaudeLLMClient)
 
 
+@pytest.mark.usefixtures("ollama_http_client_config")
+def test_get_llm_client_returns_ollama(monkeypatch: pytest.MonkeyPatch):
+    client = get_llm_client()
+    assert isinstance(client, OllamaLLMClient)
+
+
 def test_get_llm_client_unsupported_provider(monkeypatch: pytest.MonkeyPatch):
-    monkeypatch.setattr("ai_review.services.llm.factory.settings.llm.provider", "
+    monkeypatch.setattr("ai_review.services.llm.factory.settings.llm.provider", "UNSUPPORTED")
     with pytest.raises(ValueError):
         get_llm_client()
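
For orientation, these tests exercise a provider dispatch in ai_review/services/llm/factory.py whose source is not shown in this diff. The sketch below only illustrates the behaviour the tests imply (map settings.llm.provider to a client class, raise ValueError otherwise); the enum member names are assumptions, not confirmed by the diff.

# Minimal sketch of the dispatch implied by the tests above; assumed shape,
# not the package's actual factory code.
from ai_review.config import settings
from ai_review.libs.constants.llm_provider import LLMProvider
from ai_review.services.llm.claude.client import ClaudeLLMClient
from ai_review.services.llm.gemini.client import GeminiLLMClient
from ai_review.services.llm.ollama.client import OllamaLLMClient
from ai_review.services.llm.openai.client import OpenAILLMClient


def get_llm_client_sketch():
    # Hypothetical member names (OPENAI, CLAUDE, GEMINI, OLLAMA); only the
    # existence of an Ollama provider constant is visible in this diff.
    clients = {
        LLMProvider.OPENAI: OpenAILLMClient,
        LLMProvider.CLAUDE: ClaudeLLMClient,
        LLMProvider.GEMINI: GeminiLLMClient,
        LLMProvider.OLLAMA: OllamaLLMClient,
    }
    try:
        return clients[settings.llm.provider]()
    except KeyError:
        raise ValueError(f"Unsupported LLM provider: {settings.llm.provider}")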
File without changes
ai_review/tests/suites/services/vcs/bitbucket/test_service.py
@@ -0,0 +1,117 @@
+import pytest
+
+from ai_review.services.vcs.bitbucket.client import BitbucketVCSClient
+from ai_review.services.vcs.types import ReviewInfoSchema, ReviewCommentSchema
+from ai_review.tests.fixtures.clients.bitbucket import FakeBitbucketPullRequestsHTTPClient
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+async def test_get_review_info_returns_valid_schema(
+    bitbucket_vcs_client: BitbucketVCSClient,
+    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient,
+):
+    """Should return detailed PR info with branches, author, reviewers, and files."""
+    info = await bitbucket_vcs_client.get_review_info()
+
+    assert isinstance(info, ReviewInfoSchema)
+    assert info.id == 1
+    assert info.title == "Fake Bitbucket PR"
+    assert info.description == "This is a fake PR for testing"
+
+    assert info.author.username == "tester"
+    assert {r.username for r in info.reviewers} == {"reviewer"}
+
+    assert info.source_branch.ref == "feature/test"
+    assert info.target_branch.ref == "main"
+    assert info.base_sha == "abc123"
+    assert info.head_sha == "def456"
+
+    assert "app/main.py" in info.changed_files
+    assert len(info.changed_files) == 2
+
+    called_methods = [name for name, _ in fake_bitbucket_pull_requests_http_client.calls]
+    assert called_methods == ["get_pull_request", "get_files"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+async def test_get_general_comments_filters_inline(
+    bitbucket_vcs_client: BitbucketVCSClient,
+    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient,
+):
+    """Should return only general comments (without inline info)."""
+    comments = await bitbucket_vcs_client.get_general_comments()
+
+    assert all(isinstance(c, ReviewCommentSchema) for c in comments)
+    assert len(comments) == 1
+
+    first = comments[0]
+    assert first.body == "General comment"
+    assert first.file is None
+    assert first.line is None
+
+    called_methods = [name for name, _ in fake_bitbucket_pull_requests_http_client.calls]
+    assert called_methods == ["get_comments"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+async def test_get_inline_comments_filters_general(
+    bitbucket_vcs_client: BitbucketVCSClient,
+    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient,
+):
+    """Should return only inline comments with file and line references."""
+    comments = await bitbucket_vcs_client.get_inline_comments()
+
+    assert all(isinstance(c, ReviewCommentSchema) for c in comments)
+    assert len(comments) == 1
+
+    first = comments[0]
+    assert first.body == "Inline comment"
+    assert first.file == "file.py"
+    assert first.line == 5
+
+    called_methods = [name for name, _ in fake_bitbucket_pull_requests_http_client.calls]
+    assert called_methods == ["get_comments"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+async def test_create_general_comment_posts_comment(
+    bitbucket_vcs_client: BitbucketVCSClient,
+    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient,
+):
+    """Should post a general (non-inline) comment."""
+    message = "Hello from Bitbucket test!"
+
+    await bitbucket_vcs_client.create_general_comment(message)
+
+    calls = [args for name, args in fake_bitbucket_pull_requests_http_client.calls if name == "create_comment"]
+    assert len(calls) == 1
+    call_args = calls[0]
+    assert call_args["content"]["raw"] == message
+    assert call_args["workspace"] == "workspace"
+    assert call_args["repo_slug"] == "repo"
+
+
+@pytest.mark.asyncio
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+async def test_create_inline_comment_posts_comment(
+    bitbucket_vcs_client: BitbucketVCSClient,
+    fake_bitbucket_pull_requests_http_client: FakeBitbucketPullRequestsHTTPClient,
+):
+    """Should post an inline comment with correct file and line."""
+    file = "file.py"
+    line = 10
+    message = "Looks good"
+
+    await bitbucket_vcs_client.create_inline_comment(file, line, message)
+
+    calls = [args for name, args in fake_bitbucket_pull_requests_http_client.calls if name == "create_comment"]
+    assert len(calls) == 1
+
+    call_args = calls[0]
+    assert call_args["content"]["raw"] == message
+    assert call_args["inline"]["path"] == file
+    assert call_args["inline"]["to"] == line
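
The assertions above pin down the comment payload shape the new Bitbucket client is expected to pass to create_comment: general comments carry only content.raw, inline comments add an inline object with path and to. The dicts below restate that shape for quick reference; any field beyond those asserted is not taken from this diff.

# Shapes implied by the assertions above (illustrative only).
general_comment = {"content": {"raw": "Hello from Bitbucket test!"}}

inline_comment = {
    "content": {"raw": "Looks good"},
    "inline": {"path": "file.py", "to": 10},
}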
ai_review/tests/suites/services/vcs/test_factory.py
@@ -1,5 +1,6 @@
 import pytest
 
+from ai_review.services.vcs.bitbucket.client import BitbucketVCSClient
 from ai_review.services.vcs.factory import get_vcs_client
 from ai_review.services.vcs.github.client import GitHubVCSClient
 from ai_review.services.vcs.gitlab.client import GitLabVCSClient
@@ -17,7 +18,13 @@ def test_get_vcs_client_returns_gitlab(monkeypatch: pytest.MonkeyPatch):
     assert isinstance(client, GitLabVCSClient)
 
 
+@pytest.mark.usefixtures("bitbucket_http_client_config")
+def test_get_vcs_client_returns_bitbucket(monkeypatch: pytest.MonkeyPatch):
+    client = get_vcs_client()
+    assert isinstance(client, BitbucketVCSClient)
+
+
 def test_get_vcs_client_unsupported_provider(monkeypatch: pytest.MonkeyPatch):
-    monkeypatch.setattr("ai_review.services.vcs.factory.settings.vcs.provider", "
+    monkeypatch.setattr("ai_review.services.vcs.factory.settings.vcs.provider", "UNSUPPORTED")
     with pytest.raises(ValueError):
         get_vcs_client()
{xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: xai-review
-Version: 0.
+Version: 0.26.0
 Summary: AI-powered code review tool
 Author-email: Nikita Filonov <nikita.filonov@example.com>
 Maintainer-email: Nikita Filonov <nikita.filonov@example.com>
@@ -66,8 +66,8 @@ improve code quality, enforce consistency, and speed up the review process.
 
 ✨ Key features:
 
-- **Multiple LLM providers** — choose between **OpenAI**, **Claude**,
-- **VCS integration** — works out of the box with GitLab
+- **Multiple LLM providers** — choose between **OpenAI**, **Claude**, **Gemini**, or **Ollama**, and switch anytime.
+- **VCS integration** — works out of the box with **GitLab**, **GitHub**, and **Bitbucket**.
 - **Customizable prompts** — adapt inline, context, and summary reviews to match your team’s coding guidelines.
 - **Flexible configuration** — supports `YAML`, `JSON`, and `ENV`, with seamless overrides in CI/CD pipelines.
 - **AI Review runs fully client-side** — it never proxies or inspects your requests.
@@ -168,9 +168,9 @@ for complete, ready-to-use examples.
 
 Key things you can customize:
 
-- **LLM provider** — OpenAI, Gemini, or
+- **LLM provider** — OpenAI, Gemini, Claude, or Ollama
 - **Model settings** — model name, temperature, max tokens
-- **VCS integration** — works out of the box with **GitLab** and **
+- **VCS integration** — works out of the box with **GitLab**, **GitHub**, and **Bitbucket**
 - **Review policy** — which files to include/exclude, review modes
 - **Prompts** — inline/context/summary prompt templates
 
@@ -209,7 +209,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: Nikita-Filonov/ai-review@v0.
+      - uses: Nikita-Filonov/ai-review@v0.26.0
         with:
          review-command: ${{ inputs.review-command }}
        env:
@@ -288,6 +288,10 @@ provider** explicitly configured in your `.ai-review.yaml`.
 All data is sent **directly** from your CI/CD environment to the selected LLM API endpoint (e.g. OpenAI, Gemini,
 Claude). No intermediary servers or storage layers are involved.
 
+If you use **Ollama**, requests are sent to your **local or self-hosted Ollama runtime**
+(by default `http://localhost:11434`). This allows you to run reviews completely **offline**, keeping all data strictly
+inside your infrastructure.
+
 > ⚠️ Please ensure you use proper API tokens and avoid exposing corporate or personal secrets.
 > If you accidentally leak private code or credentials due to incorrect configuration (e.g., using a personal key
 > instead of an enterprise one), it is **your responsibility** — the tool does not retain or share any data by itself.
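
Since the README now points Ollama users at a local runtime, a quick reachability check before launching a review can save a confusing failure. This is a generic sketch, not part of xai-review; /api/tags is Ollama's model-listing route and the default address matches the one quoted above.

# Generic pre-flight check (not part of the package): confirm a local Ollama
# runtime is reachable and see which models it has pulled.
import httpx

try:
    data = httpx.get("http://localhost:11434/api/tags", timeout=5).json()
    print("Ollama is up, local models:", [m["name"] for m in data.get("models", [])])
except httpx.HTTPError as error:
    print("Ollama runtime not reachable:", error)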
{xai_review-0.24.0.dist-info → xai_review-0.26.0.dist-info}/RECORD
@@ -8,12 +8,23 @@ ai_review/cli/commands/run_inline_review.py,sha256=u55K-Su0PR2-NcK7XI2rTCIi7HTEi
 ai_review/cli/commands/run_review.py,sha256=i39IYNDE_lAiQQnKLmxG71Ao8WAIOSn82L9EpdbPcsI,261
 ai_review/cli/commands/run_summary_review.py,sha256=NqjepGH5cbqczPzcuMEAxO4dI58FEUZl0b6uRVQ9SiA,224
 ai_review/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/clients/bitbucket/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/clients/bitbucket/client.py,sha256=VaqaQ5USMPTOEeS5XPdr-RkMKsxUpJ2SBE6lcemkz-g,1174
+ai_review/clients/bitbucket/pr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/clients/bitbucket/pr/client.py,sha256=9C6vXBz8o0Df76N9WW4hORN-Q39Vd8I575AaidyW_HM,4359
+ai_review/clients/bitbucket/pr/types.py,sha256=ZICV4ghYChj1Jl9Nlwyw1_kwmGybX51GhGdGzkRaLCk,1296
+ai_review/clients/bitbucket/pr/schema/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/clients/bitbucket/pr/schema/comments.py,sha256=DLi3LhThXfHB9MJ5Akv7Yf_n-VttjvJAausSMoksHTY,1152
+ai_review/clients/bitbucket/pr/schema/files.py,sha256=A-h9Cgi0iJ6e9pGr5TcbpgSb3y9SMTqNi5FxJ7ySxpk,546
+ai_review/clients/bitbucket/pr/schema/pull_request.py,sha256=buGULgaCkxCUFSdiw0XTwaSIYP_p1rAEuKXUyJ_Mzi8,863
 ai_review/clients/claude/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/clients/claude/client.py,sha256=
+ai_review/clients/claude/client.py,sha256=uEadbBNBJnzjHDczbxXiiw1V1H1PdUWKu-Gn-eIDEmw,1890
 ai_review/clients/claude/schema.py,sha256=LE6KCjJKDXqBGU2Cno5XL5R8vUfScgskE9MqvE0Pt2A,887
+ai_review/clients/claude/types.py,sha256=y_-yF7zQrTvyiowS2b9xjIlAzkF8i6OfOjqo9eB8Xo4,267
 ai_review/clients/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/clients/gemini/client.py,sha256=
+ai_review/clients/gemini/client.py,sha256=4G1LBcpiFcrITOysQbMwhY1db4hHcSGgyI-0XazZMV0,1889
 ai_review/clients/gemini/schema.py,sha256=5oVvbI-h_sw8bFreS4JUmMj-aXa_frvxK3H8sg4iJIA,2264
+ai_review/clients/gemini/types.py,sha256=D-P0THorrQ8yq5P-NKAC65zzhEYRa9HkiXTORG9QoIk,267
 ai_review/clients/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/clients/github/client.py,sha256=pprQcCYdrhRYtuqRsTFiCbj54Qb1Ll6_jmlm7AJg8pk,1149
 ai_review/clients/github/pr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -33,9 +44,14 @@ ai_review/clients/gitlab/mr/schema/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 ai_review/clients/gitlab/mr/schema/changes.py,sha256=ZqSPb8zO0z_V8cEjxoTqnwbjRLxo6OTV4LeQEAg91cU,835
 ai_review/clients/gitlab/mr/schema/discussions.py,sha256=JgvxKfHoYxmp86aP4MpIczK-arU0hc-BZLASWDWBIRs,790
 ai_review/clients/gitlab/mr/schema/notes.py,sha256=yfnnRt69fALKfapzZpVtvCvNwPkq5jBFI7fbPMq1w1c,424
+ai_review/clients/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/clients/ollama/client.py,sha256=KoJ9J5_Vfpv5XNJREshE_gA46uo9J0Z3qVC7wJPEcX8,1720
+ai_review/clients/ollama/schema.py,sha256=A6oKwkkEVrduyzMR_lhLnaLyvKXqlfsXjkMIF2eXaYw,1310
+ai_review/clients/ollama/types.py,sha256=9ES8K-EClKYU7UsaMKgXvZ3sUOF9o6reEvfL6wFOJ4M,267
 ai_review/clients/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/clients/openai/client.py,sha256=
+ai_review/clients/openai/client.py,sha256=jY1XG_5GtNboNjkXu3UtuXFx5V9rD6UskK7VT0lOzP8,1816
 ai_review/clients/openai/schema.py,sha256=glxwMtBrDA6W0BQgH-ruKe0bKH3Ps1P-Y1-2jGdqaUM,764
+ai_review/clients/openai/types.py,sha256=4VRY45ihKjii8w0d5XLnUGnHuBSh9wRsOP6lmkseC0Q,267
 ai_review/libs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/libs/json.py,sha256=g-P5_pNUomQ-bGHCXASvPKj9Og0s9MaLFVEAkzqGp1A,350
 ai_review/libs/logger.py,sha256=LbXR2Zk1btJ-83I-vHee7cUETgT1mHToSsqEI_8uM0U,370
@@ -46,22 +62,25 @@ ai_review/libs/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
 ai_review/libs/config/artifacts.py,sha256=8BzbQu5GxwV6i6qzrUKM1De1Ogb00Ph5WTqwZ3fVpGg,483
 ai_review/libs/config/base.py,sha256=sPf3OKeF1ID0ouOwiVaUtvpWuZXJXQvIw5kbnPUyN9o,686
 ai_review/libs/config/core.py,sha256=ZQ2QtYr7vAF0tXbVLvVwk9QFE5h6JjAKAUQWcb9gHws,87
-ai_review/libs/config/http.py,sha256=
+ai_review/libs/config/http.py,sha256=dx5PwgnGbPocUwf9QRhFmXmjfFDoeerOM04yB3B6S8w,398
 ai_review/libs/config/logger.py,sha256=oPmjpjf6EZwW7CgOjT8mOQdGnT98CLwXepiGB_ajZvU,384
 ai_review/libs/config/prompt.py,sha256=8aO5WNnhVhQcpWzWxqzb9lq6PzormaJazVwPHuf_ia8,4469
 ai_review/libs/config/review.py,sha256=LEZni68iH_0m4URPfN0d3F6yrrK7KSn-BwXf-7w2al8,1058
 ai_review/libs/config/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/libs/config/llm/base.py,sha256=
-ai_review/libs/config/llm/claude.py,sha256=
-ai_review/libs/config/llm/gemini.py,sha256=
-ai_review/libs/config/llm/
+ai_review/libs/config/llm/base.py,sha256=ovvULFhfwH66_705D1O87ZGMeaQOZO7ZQhRUzzfzguU,2089
+ai_review/libs/config/llm/claude.py,sha256=MoalXkBA6pEp01znS8ohTRopfea9RUcqhZX5lOIuek8,293
+ai_review/libs/config/llm/gemini.py,sha256=SKtlzsRuNWOlM9m3SFvcqOIjnml8lpPidp7FiGmIEz4,265
+ai_review/libs/config/llm/meta.py,sha256=cEcAHOwy-mQBKo9_KJrQe0I7qppq6h99lSmoWX4ElJI,195
+ai_review/libs/config/llm/ollama.py,sha256=M6aiPb5GvYvkiGcgHTsh9bOw5JsBLqmfSKoIbHCejrU,372
+ai_review/libs/config/llm/openai.py,sha256=jGVL4gJ2wIacoKeK9Zc9LCgY95TxdeYOThdglVPErFU,262
 ai_review/libs/config/vcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/libs/config/vcs/base.py,sha256=
-ai_review/libs/config/vcs/
-ai_review/libs/config/vcs/
+ai_review/libs/config/vcs/base.py,sha256=ks9lrSalkPUuG8ijlaw-8d-F-dv59GdSywHS2TsIKjs,1085
+ai_review/libs/config/vcs/bitbucket.py,sha256=on5sQaE57kM_zSmqdDUNrttVtTPGOzqLHM5s7eFN7DA,275
+ai_review/libs/config/vcs/github.py,sha256=hk-kuDLd8wecqtEb8PSqF7Yy_pkihplJhi6nB6FZID4,256
+ai_review/libs/config/vcs/gitlab.py,sha256=ecYfU158VgVlM6P5mgZn8FOqk3Xt60xx7gUqT5e22a4,252
 ai_review/libs/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/libs/constants/llm_provider.py,sha256=
-ai_review/libs/constants/vcs_provider.py,sha256=
+ai_review/libs/constants/llm_provider.py,sha256=k7GzctIZ-TDsRlhTPbpGYgym_CO2YKVFp_oXG9dTBW0,143
+ai_review/libs/constants/vcs_provider.py,sha256=xJpRdJIdAf05iH2x2f362d1MuviOlPVP7In-JvDVotE,127
 ai_review/libs/diff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/libs/diff/models.py,sha256=RT4YJboOPA-AjNJGRj_HIZaJLEmROOhOgMh1wIGpIwY,2344
 ai_review/libs/diff/parser.py,sha256=2BGxZnRN3SRjNnZK4qIOW28aM93Ij__1SltwclJrlno,3817
@@ -109,12 +128,14 @@ ai_review/services/hook/constants.py,sha256=uQJld5tJVUFk506h5RswTqLy-sIYxQfuQcUw
 ai_review/services/hook/service.py,sha256=InPoWBas6SPoy0KUyKJFg5xVk90jBlWdWtUTaX71G88,6364
 ai_review/services/hook/types.py,sha256=zwOmnZVGlg53vUoC2rHNhpEiNsTpf0Tnb-s3SRPKFys,1405
 ai_review/services/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/services/llm/factory.py,sha256=
+ai_review/services/llm/factory.py,sha256=AszDqufYPaZdVVR99UZBEFvnGdOildBFQ9uVOiBI1Tc,876
 ai_review/services/llm/types.py,sha256=OvbJWYRDThBgLhn9TWU0mliuanOW01CS3e8ermtuS-s,353
 ai_review/services/llm/claude/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/services/llm/claude/client.py,sha256=JJD0FWiXjCCpO7NW3vVoBMXhTQ9VBA4Q93QqkeQqON0,1082
 ai_review/services/llm/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/services/llm/gemini/client.py,sha256=TR4HshVxtDV8_luQKCM3aFNH9tjAjpzNeFBg-oxdsfA,1282
+ai_review/services/llm/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/services/llm/ollama/client.py,sha256=817nOQRsnaVqoY6LdO95l5JkRHkGvvS8TX7hezT2gqk,1479
 ai_review/services/llm/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/services/llm/openai/client.py,sha256=c3DWwLnwTheERdSGnMiQIbg5SaICouUAGClcQZSh1fE,1159
 ai_review/services/prompt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -139,8 +160,10 @@ ai_review/services/review/summary/schema.py,sha256=GipVNWrEKtgZPkytNSrXwzvX9Zq8P
 ai_review/services/review/summary/service.py,sha256=F4diIESc0y7YSiUKbInHWiSOW5tW_eQ0rpf78wKxLAo,562
 ai_review/services/review/summary/types.py,sha256=iDsucvx9xJZ5Xb5FN70da3bub3YDtt4vpQeVEK532E8,235
 ai_review/services/vcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/services/vcs/factory.py,sha256=
+ai_review/services/vcs/factory.py,sha256=AfhpZjQ257BkLjb_7zUyw_EUnfEiCUHgTph7GGm-MY4,753
 ai_review/services/vcs/types.py,sha256=S49LhAGHVAd_0QwZUr4JMhfc6DR-HikHR6-T_ETlTus,1998
+ai_review/services/vcs/bitbucket/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/services/vcs/bitbucket/client.py,sha256=OceM48MBoiUVKGTh8ZrrpVt8a1fDczCvOMD9VlwoapY,7253
 ai_review/services/vcs/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/services/vcs/github/client.py,sha256=v6NV97xi_rtRQQi8atRdSrXKhSOQ7CeRHK7YjoyjU6Q,6353
 ai_review/services/vcs/gitlab/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -148,11 +171,13 @@ ai_review/services/vcs/gitlab/client.py,sha256=LK95m-uFSxhDEVU-cBGct61NTKjul-ieL
 ai_review/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/fixtures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/fixtures/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/tests/fixtures/clients/
-ai_review/tests/fixtures/clients/
+ai_review/tests/fixtures/clients/bitbucket.py,sha256=XJK1nU7Wir5PnmwCUJ_2uTlByA5a_CTEuXc2a-WmWio,7122
+ai_review/tests/fixtures/clients/claude.py,sha256=6ldJlSSea0zsZV0hRDMi9mqWm0hWT3mp_ROwG_sVU1c,2203
+ai_review/tests/fixtures/clients/gemini.py,sha256=zhLJhm49keKEBCPOf_pLu8_zCatsKKAWM4-gXOhaXeM,2429
 ai_review/tests/fixtures/clients/github.py,sha256=Mzr8LcvVlYLhimzDMG4tEOQwj_6E6kTvYvSrq04R3YI,6865
 ai_review/tests/fixtures/clients/gitlab.py,sha256=_0JSN-ixA7nDOwY18BlL_L9fh_qmT1_6sxGx_CIRhmM,5540
-ai_review/tests/fixtures/clients/
+ai_review/tests/fixtures/clients/ollama.py,sha256=UUHDDPUraQAG8gBC-0UvftaK0BDYir5cJDlRKJymSQg,2109
+ai_review/tests/fixtures/clients/openai.py,sha256=UgfuRZWzl3X7ZVHMLKP4mZxNXVpcccitkc9tuUyffXE,2267
 ai_review/tests/fixtures/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/fixtures/services/artifacts.py,sha256=V5FvUnC9OAo0n-paxxJP5OxAgLz1Zz3OZ8zZvqu_01w,1462
 ai_review/tests/fixtures/services/cost.py,sha256=A6Ja0CtQ-k6pR2-B5LRE8EzkqPL34xHGXYtaILjhYvw,1612
@@ -176,6 +201,9 @@ ai_review/tests/suites/clients/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
 ai_review/tests/suites/clients/github/test_client.py,sha256=BiuLKCHIk83U1szYEZkB-n3vvyPgj6tAI5EqxKiT-CY,558
 ai_review/tests/suites/clients/gitlab/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/clients/gitlab/test_client.py,sha256=5QOkNvgm0XRKHh79FNIY9CTonAqYPXqCCxcxeiAHYCA,560
+ai_review/tests/suites/clients/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/clients/ollama/test_client.py,sha256=XZ8NAd1bS_ltTuYZPgqlutPRA6kbvH3_3SKTCbNBTgA,404
+ai_review/tests/suites/clients/ollama/test_schema.py,sha256=A93wCmxwGdvudfbA97VCPYP3gT6u6EYMetAg5fgURRA,1836
 ai_review/tests/suites/clients/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/clients/openai/test_client.py,sha256=6Wsxw6-6Uk0uPYFkzpWSwsxfCYUZhT3UYznayo-xlPI,404
 ai_review/tests/suites/clients/openai/test_schema.py,sha256=x1tamS4GC9pOTpjieKDbK2D73CVV4BkATppytwMevLo,1599
@@ -202,7 +230,15 @@ ai_review/tests/suites/services/diff/test_tools.py,sha256=vsOSSIDZKkuD8dMCoBBEBt
 ai_review/tests/suites/services/hook/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/services/hook/test_service.py,sha256=GM_AiNVGP2Pgp-3BwGOAIfA8lLXl6ah28ey77KZz_C4,2750
 ai_review/tests/suites/services/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/tests/suites/services/llm/test_factory.py,sha256=
+ai_review/tests/suites/services/llm/test_factory.py,sha256=Lp37aXM08fHaLzgwRBw5xZEDZkPDNhJ4qwjtEwGENv8,1394
+ai_review/tests/suites/services/llm/claude/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/services/llm/claude/test_client.py,sha256=ymIeuIax0Bp_CuXBSApK1RDl1JmbGc97uzXZToQOZO8,761
+ai_review/tests/suites/services/llm/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/services/llm/gemini/test_client.py,sha256=RjYViMZTgTdbzmDpOvwjuwYVkQV3IyNRhxZ8Y_cfJiQ,764
+ai_review/tests/suites/services/llm/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/services/llm/ollama/test_client.py,sha256=Eu4OERB00SJwCKznyOCyqSFTDBp9J2Lw-BcW7sPJQM4,760
+ai_review/tests/suites/services/llm/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/services/llm/openai/test_client.py,sha256=yzIL8GYHyX9iLKIlaF__87aue9w0cr66feoMaCv5gms,761
 ai_review/tests/suites/services/prompt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/services/prompt/test_adapter.py,sha256=9KZOFQmZUs3l_cW7Q5LIMPs4i4J-gOCQ6VrlDPR0ImU,2156
 ai_review/tests/suites/services/prompt/test_schema.py,sha256=rm2__LA2_4qQwSmNAZ_Wnpy11T3yYRkYUkRUrqxUQKE,5421
@@ -219,14 +255,16 @@ ai_review/tests/suites/services/review/summary/__init__.py,sha256=47DEQpj8HBSa-_
 ai_review/tests/suites/services/review/summary/test_schema.py,sha256=HUbSDbQzBp-iTsGLs7hJfu-sz6sq9xLO0woGmZPWyx0,735
 ai_review/tests/suites/services/review/summary/test_service.py,sha256=ibiYOWQMZuQKRutIT_EKGq7DEPQvp62YhscNHeSWFVQ,588
 ai_review/tests/suites/services/vcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_review/tests/suites/services/vcs/test_factory.py,sha256=
+ai_review/tests/suites/services/vcs/test_factory.py,sha256=EergKSHW4b7RZg9vJJ5Cj0XfPsDTLEclV1kq2_9greA,1138
+ai_review/tests/suites/services/vcs/bitbucket/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_review/tests/suites/services/vcs/bitbucket/test_service.py,sha256=JnG5BYTgGMb-doNjis2BOeI8JrMmvqwv82UFD5f92kg,4448
 ai_review/tests/suites/services/vcs/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/services/vcs/github/test_service.py,sha256=c2sjecm4qzqYXuO9j6j35NQyJzqDpnXIJImRTcpkyHo,4378
 ai_review/tests/suites/services/vcs/gitlab/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ai_review/tests/suites/services/vcs/gitlab/test_service.py,sha256=0dqgL5whzjcP-AQ4adP_12QfkYm_ZtdtMotmYm8Se7Y,4449
-xai_review-0.
-xai_review-0.
-xai_review-0.
-xai_review-0.
-xai_review-0.
-xai_review-0.
+xai_review-0.26.0.dist-info/licenses/LICENSE,sha256=p-v8m7Kmz4KKc7PcvsGiGEmCw9AiSXY4_ylOPy_u--Y,11343
+xai_review-0.26.0.dist-info/METADATA,sha256=RXLUNKPnkpxjCcXWbKwFI8RN0UHvVjfWwtqGpVsukIs,11150
+xai_review-0.26.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+xai_review-0.26.0.dist-info/entry_points.txt,sha256=JyC5URanMi5io5P_PXQf7H_I1OGIpk5cZQhaPQ0g4Zs,53
+xai_review-0.26.0.dist-info/top_level.txt,sha256=sTsZbfzLoqvRZKdKa-BcxWvjlHdrpbeJ6DrGY0EuR0E,10
+xai_review-0.26.0.dist-info/RECORD,,

File without changes
File without changes
File without changes