appkit-assistant 0.16.3__py3-none-any.whl → 0.17.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- appkit_assistant/backend/file_manager.py +117 -0
- appkit_assistant/backend/models.py +12 -0
- appkit_assistant/backend/processors/claude_base.py +178 -0
- appkit_assistant/backend/processors/claude_responses_processor.py +923 -0
- appkit_assistant/backend/processors/gemini_base.py +84 -0
- appkit_assistant/backend/processors/gemini_responses_processor.py +726 -0
- appkit_assistant/backend/processors/lorem_ipsum_processor.py +2 -0
- appkit_assistant/backend/processors/openai_base.py +10 -10
- appkit_assistant/backend/processors/openai_chat_completion_processor.py +25 -8
- appkit_assistant/backend/processors/openai_responses_processor.py +22 -15
- appkit_assistant/{logic → backend}/response_accumulator.py +58 -11
- appkit_assistant/components/__init__.py +2 -0
- appkit_assistant/components/composer.py +99 -12
- appkit_assistant/components/message.py +218 -50
- appkit_assistant/components/thread.py +2 -1
- appkit_assistant/configuration.py +2 -0
- appkit_assistant/state/thread_state.py +239 -5
- {appkit_assistant-0.16.3.dist-info → appkit_assistant-0.17.1.dist-info}/METADATA +4 -1
- {appkit_assistant-0.16.3.dist-info → appkit_assistant-0.17.1.dist-info}/RECORD +20 -15
- {appkit_assistant-0.16.3.dist-info → appkit_assistant-0.17.1.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Gemini base processor for generating AI responses using Google's GenAI API.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from collections.abc import AsyncGenerator
|
|
8
|
+
from typing import Any, Final
|
|
9
|
+
|
|
10
|
+
from google import genai
|
|
11
|
+
|
|
12
|
+
from appkit_assistant.backend.models import (
|
|
13
|
+
AIModel,
|
|
14
|
+
Chunk,
|
|
15
|
+
MCPServer,
|
|
16
|
+
Message,
|
|
17
|
+
)
|
|
18
|
+
from appkit_assistant.backend.processor import Processor
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
# Registry entry for the Gemini 3 Pro preview model.
# NOTE(review): attachments are explicitly disabled ("Deferred to Phase 2"
# per the inline comment); only streaming text + tool use are advertised.
GEMINI_3_PRO: Final = AIModel(
    id="gemini-3-pro-preview",
    text="Gemini 3 Pro",
    icon="googlegemini",
    model="gemini-3-pro-preview",
    stream=True,  # responses are streamed chunk-by-chunk
    supports_attachments=False,  # Deferred to Phase 2
    supports_tools=True,
)
|
|
31
|
+
|
|
32
|
+
# Registry entry for the Gemini 3 Flash preview model.
# Mirrors the Pro entry: streaming and tools on, attachments off
# (Deferred to Phase 2 per the inline comment).
GEMINI_3_FLASH: Final = AIModel(
    id="gemini-3-flash-preview",
    text="Gemini 3 Flash",
    icon="googlegemini",
    model="gemini-3-flash-preview",
    stream=True,  # responses are streamed chunk-by-chunk
    supports_attachments=False,  # Deferred to Phase 2
    supports_tools=True,
)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class BaseGeminiProcessor(Processor, ABC):
    """Shared base for Gemini-backed processors.

    Owns the supported-model registry and the lazily constructed
    ``genai.Client``. When no API key is supplied, or client construction
    fails, ``self.client`` stays ``None`` and the processor is effectively
    disabled (subclasses are expected to check for this).
    """

    def __init__(
        self,
        models: dict[str, AIModel],
        api_key: str | None = None,
    ) -> None:
        """Set up the model registry and (optionally) the GenAI client.

        Args:
            models: Dictionary of supported AI models
            api_key: Google GenAI API key
        """
        self.models = models
        # Client stays None when no key is given or construction fails.
        self.client: genai.Client | None = None

        # Guard clause: without a key there is nothing to initialize.
        if not api_key:
            logger.warning("Gemini API key not found. Processor disabled.")
            return

        try:
            # v1beta is required for the preview model endpoints.
            self.client = genai.Client(
                api_key=api_key, http_options={"api_version": "v1beta"}
            )
        except Exception as exc:
            # Best-effort: log and leave the processor disabled rather
            # than propagating a startup failure to the caller.
            logger.error("Failed to initialize Gemini client: %s", exc)

    def get_supported_models(self) -> dict[str, AIModel]:
        """Return the registry of models this processor can serve."""
        return self.models

    @abstractmethod
    async def process(
        self,
        messages: list[Message],
        model_id: str,
        files: list[str] | None = None,
        mcp_servers: list[MCPServer] | None = None,
        payload: dict[str, Any] | None = None,
        user_id: int | None = None,
    ) -> AsyncGenerator[Chunk, None]:
        """Process messages."""
|