appkit-assistant 0.17.3__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- appkit_assistant/backend/{models.py → database/models.py} +32 -132
- appkit_assistant/backend/{repositories.py → database/repositories.py} +93 -1
- appkit_assistant/backend/model_manager.py +5 -5
- appkit_assistant/backend/models/__init__.py +28 -0
- appkit_assistant/backend/models/anthropic.py +31 -0
- appkit_assistant/backend/models/google.py +27 -0
- appkit_assistant/backend/models/openai.py +50 -0
- appkit_assistant/backend/models/perplexity.py +56 -0
- appkit_assistant/backend/processors/__init__.py +29 -0
- appkit_assistant/backend/processors/claude_responses_processor.py +205 -387
- appkit_assistant/backend/processors/gemini_responses_processor.py +231 -299
- appkit_assistant/backend/processors/lorem_ipsum_processor.py +6 -4
- appkit_assistant/backend/processors/mcp_mixin.py +297 -0
- appkit_assistant/backend/processors/openai_base.py +11 -125
- appkit_assistant/backend/processors/openai_chat_completion_processor.py +5 -3
- appkit_assistant/backend/processors/openai_responses_processor.py +480 -402
- appkit_assistant/backend/processors/perplexity_processor.py +156 -79
- appkit_assistant/backend/{processor.py → processors/processor_base.py} +7 -2
- appkit_assistant/backend/processors/streaming_base.py +188 -0
- appkit_assistant/backend/schemas.py +138 -0
- appkit_assistant/backend/services/auth_error_detector.py +99 -0
- appkit_assistant/backend/services/chunk_factory.py +273 -0
- appkit_assistant/backend/services/citation_handler.py +292 -0
- appkit_assistant/backend/services/file_cleanup_service.py +316 -0
- appkit_assistant/backend/services/file_upload_service.py +903 -0
- appkit_assistant/backend/services/file_validation.py +138 -0
- appkit_assistant/backend/{mcp_auth_service.py → services/mcp_auth_service.py} +4 -2
- appkit_assistant/backend/services/mcp_token_service.py +61 -0
- appkit_assistant/backend/services/message_converter.py +289 -0
- appkit_assistant/backend/services/openai_client_service.py +120 -0
- appkit_assistant/backend/{response_accumulator.py → services/response_accumulator.py} +163 -1
- appkit_assistant/backend/services/system_prompt_builder.py +89 -0
- appkit_assistant/backend/services/thread_service.py +5 -3
- appkit_assistant/backend/system_prompt_cache.py +3 -3
- appkit_assistant/components/__init__.py +8 -4
- appkit_assistant/components/composer.py +59 -24
- appkit_assistant/components/file_manager.py +623 -0
- appkit_assistant/components/mcp_server_dialogs.py +12 -20
- appkit_assistant/components/mcp_server_table.py +12 -2
- appkit_assistant/components/message.py +119 -2
- appkit_assistant/components/thread.py +1 -1
- appkit_assistant/components/threadlist.py +4 -2
- appkit_assistant/components/tools_modal.py +37 -20
- appkit_assistant/configuration.py +12 -0
- appkit_assistant/state/file_manager_state.py +697 -0
- appkit_assistant/state/mcp_oauth_state.py +3 -3
- appkit_assistant/state/mcp_server_state.py +47 -2
- appkit_assistant/state/system_prompt_state.py +1 -1
- appkit_assistant/state/thread_list_state.py +99 -5
- appkit_assistant/state/thread_state.py +88 -9
- {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/METADATA +8 -6
- appkit_assistant-1.0.0.dist-info/RECORD +58 -0
- appkit_assistant/backend/processors/claude_base.py +0 -178
- appkit_assistant/backend/processors/gemini_base.py +0 -84
- appkit_assistant-0.17.3.dist-info/RECORD +0 -39
- /appkit_assistant/backend/{file_manager.py → services/file_manager.py} +0 -0
- {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/WHEEL +0 -0
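Most of the churn in this release is a reorganization of the flat backend/ modules into backend/database/, backend/models/, backend/processors/, and backend/services/ packages, plus new file-upload and file-management features. Below is a hedged sketch of the import-path changes implied by the renames above for downstream code; whether each relocated module still exposes the same names is an assumption to verify against the 1.0.0 sources.

# 0.17.3 module path                                1.0.0 module path (assumed from the renames above)
# appkit_assistant.backend.models               ->  appkit_assistant.backend.database.models
# appkit_assistant.backend.repositories         ->  appkit_assistant.backend.database.repositories
# appkit_assistant.backend.processor            ->  appkit_assistant.backend.processors.processor_base
# appkit_assistant.backend.mcp_auth_service     ->  appkit_assistant.backend.services.mcp_auth_service
# appkit_assistant.backend.response_accumulator ->  appkit_assistant.backend.services.response_accumulator
# appkit_assistant.backend.file_manager         ->  appkit_assistant.backend.services.file_manager

# Example adjustment for code that imported the processor base class
# (assumes the Processor class name is unchanged in processor_base.py):
from appkit_assistant.backend.processors.processor_base import Processor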
appkit_assistant/backend/processors/claude_base.py (deleted)
@@ -1,178 +0,0 @@
-"""
-Claude base processor for generating AI responses using Anthropic's Claude API.
-"""
-
-import logging
-from abc import ABC, abstractmethod
-from collections.abc import AsyncGenerator
-from pathlib import Path
-from typing import Any, Final
-
-from anthropic import AsyncAnthropic
-
-from appkit_assistant.backend.models import (
-    AIModel,
-    Chunk,
-    MCPServer,
-    Message,
-)
-from appkit_assistant.backend.processor import Processor
-
-logger = logging.getLogger(__name__)
-
-CLAUDE_HAIKU_4_5: Final = AIModel(
-    id="claude-haiku-4.5",
-    text="Claude 4.5 Haiku",
-    icon="anthropic",
-    model="claude-haiku-4-5",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1.0,
-)
-
-CLAUDE_SONNET_4_5: Final = AIModel(
-    id="claude-sonnet-4.5",
-    text="Claude 4.5 Sonnet",
-    icon="anthropic",
-    model="claude-sonnet-4-5",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1.0,
-)
-
-
-class BaseClaudeProcessor(Processor, ABC):
-    """Base class for Claude processors with common initialization and utilities."""
-
-    # Extended thinking budget (fixed at 10k tokens)
-    THINKING_BUDGET_TOKENS: Final[int] = 10000
-
-    # Max file size (5MB)
-    MAX_FILE_SIZE: Final[int] = 5 * 1024 * 1024
-
-    # Allowed file extensions
-    ALLOWED_EXTENSIONS: Final[set[str]] = {
-        "pdf",
-        "png",
-        "jpg",
-        "jpeg",
-        "xlsx",
-        "csv",
-        "docx",
-        "pptx",
-        "md",
-    }
-
-    # Image extensions (for determining content type)
-    IMAGE_EXTENSIONS: Final[set[str]] = {"png", "jpg", "jpeg", "gif", "webp"}
-
-    def __init__(
-        self,
-        models: dict[str, AIModel],
-        api_key: str | None = None,
-        base_url: str | None = None,
-    ) -> None:
-        """Initialize the base Claude processor.
-
-        Args:
-            models: Dictionary of supported AI models
-            api_key: API key for Anthropic Claude API (or Azure API key)
-            base_url: Base URL for Azure-hosted Claude (optional)
-        """
-        self.api_key = api_key
-        self.base_url = base_url
-        self.models = models
-        self.client: AsyncAnthropic | None = None
-
-        if self.api_key:
-            if self.base_url:
-                # Azure-hosted Claude
-                self.client = AsyncAnthropic(
-                    api_key=self.api_key,
-                    base_url=self.base_url,
-                )
-            else:
-                # Direct Anthropic API
-                self.client = AsyncAnthropic(api_key=self.api_key)
-        else:
-            logger.warning("No Claude API key found. Processor will not work.")
-
-    @abstractmethod
-    async def process(
-        self,
-        messages: list[Message],
-        model_id: str,
-        files: list[str] | None = None,
-        mcp_servers: list[MCPServer] | None = None,
-        payload: dict[str, Any] | None = None,
-        user_id: int | None = None,
-    ) -> AsyncGenerator[Chunk, None]:
-        """Process messages and generate AI response chunks."""
-
-    def get_supported_models(self) -> dict[str, AIModel]:
-        """Return supported models if API key is available."""
-        return self.models if self.api_key else {}
-
-    def _get_file_extension(self, file_path: str) -> str:
-        """Extract file extension from path."""
-        return file_path.rsplit(".", 1)[-1].lower() if "." in file_path else ""
-
-    def _is_image_file(self, file_path: str) -> bool:
-        """Check if file is an image based on extension."""
-        ext = self._get_file_extension(file_path)
-        return ext in self.IMAGE_EXTENSIONS
-
-    def _get_media_type(self, file_path: str) -> str:
-        """Get MIME type for a file based on extension."""
-        ext = self._get_file_extension(file_path)
-        media_types = {
-            "pdf": "application/pdf",
-            "png": "image/png",
-            "jpg": "image/jpeg",
-            "jpeg": "image/jpeg",
-            "gif": "image/gif",
-            "webp": "image/webp",
-            "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
-            "csv": "text/csv",
-            "docx": (
-                "application/vnd.openxmlformats-officedocument.wordprocessingml"
-                ".document"
-            ),
-            "pptx": (
-                "application/vnd.openxmlformats-officedocument.presentationml"
-                ".presentation"
-            ),
-            "md": "text/markdown",
-            "txt": "text/plain",
-        }
-        return media_types.get(ext, "application/octet-stream")
-
-    def _validate_file(self, file_path: str) -> tuple[bool, str]:
-        """Validate file for upload.
-
-        Args:
-            file_path: Path to the file
-
-        Returns:
-            Tuple of (is_valid, error_message)
-        """
-        path = Path(file_path)
-
-        # Check if file exists
-        if not path.exists():
-            return False, f"File not found: {file_path}"
-
-        # Check extension
-        ext = self._get_file_extension(file_path)
-        if ext not in self.ALLOWED_EXTENSIONS:
-            return False, f"Unsupported file type: {ext}"
-
-        # Check file size
-        file_size = path.stat().st_size
-        if file_size > self.MAX_FILE_SIZE:
-            size_mb = file_size / (1024 * 1024)
-            return False, f"File too large: {size_mb:.1f}MB (max 5MB)"
-
-        return True, ""
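For context, the removed base class defined the contract its concrete Claude processor filled in: hold the AsyncAnthropic client, expose the shared file-validation helpers, and implement process() as an async generator of Chunk objects. The following is a minimal hypothetical sketch of such a subclass; ExampleClaudeProcessor and its body are invented for illustration (the real implementation lives in claude_responses_processor.py), and it assumes the imports and definitions from the removed module above.

# Hypothetical 0.17.3-era subclass of the removed base class.
class ExampleClaudeProcessor(BaseClaudeProcessor):
    """Illustrative subclass; not part of the package."""

    async def process(
        self,
        messages: list[Message],
        model_id: str,
        files: list[str] | None = None,
        mcp_servers: list[MCPServer] | None = None,
        payload: dict[str, Any] | None = None,
        user_id: int | None = None,
    ) -> AsyncGenerator[Chunk, None]:
        if self.client is None:
            raise RuntimeError("No Claude API key configured")
        # Reject unsupported attachments up front with the shared helpers.
        for path in files or ():
            is_valid, error = self._validate_file(path)
            if not is_valid:
                raise ValueError(error)
        model = self.models[model_id]
        # The real subclass built an Anthropic streaming request from
        # `messages` and model.model here and yielded Chunk objects as
        # stream events arrived; that part depends on the Chunk/Message
        # schemas and is omitted from this sketch.
        return
        yield  # unreachable; keeps this an async generator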
appkit_assistant/backend/processors/gemini_base.py (deleted)
@@ -1,84 +0,0 @@
-"""
-Gemini base processor for generating AI responses using Google's GenAI API.
-"""
-
-import logging
-from abc import ABC, abstractmethod
-from collections.abc import AsyncGenerator
-from typing import Any, Final
-
-from google import genai
-
-from appkit_assistant.backend.models import (
-    AIModel,
-    Chunk,
-    MCPServer,
-    Message,
-)
-from appkit_assistant.backend.processor import Processor
-
-logger = logging.getLogger(__name__)
-
-GEMINI_3_PRO: Final = AIModel(
-    id="gemini-3-pro-preview",
-    text="Gemini 3 Pro",
-    icon="googlegemini",
-    model="gemini-3-pro-preview",
-    stream=True,
-    supports_attachments=False,  # Deferred to Phase 2
-    supports_tools=True,
-)
-
-GEMINI_3_FLASH: Final = AIModel(
-    id="gemini-3-flash-preview",
-    text="Gemini 3 Flash",
-    icon="googlegemini",
-    model="gemini-3-flash-preview",
-    stream=True,
-    supports_attachments=False,  # Deferred to Phase 2
-    supports_tools=True,
-)
-
-
-class BaseGeminiProcessor(Processor, ABC):
-    """Base class for Gemini processors with common initialization and utilities."""
-
-    def __init__(
-        self,
-        models: dict[str, AIModel],
-        api_key: str | None = None,
-    ) -> None:
-        """Initialize the base Gemini processor.
-
-        Args:
-            models: Dictionary of supported AI models
-            api_key: Google GenAI API key
-        """
-        self.models = models
-        self.client: genai.Client | None = None
-
-        if api_key:
-            try:
-                self.client = genai.Client(
-                    api_key=api_key, http_options={"api_version": "v1beta"}
-                )
-            except Exception as e:
-                logger.error("Failed to initialize Gemini client: %s", e)
-        else:
-            logger.warning("Gemini API key not found. Processor disabled.")
-
-    def get_supported_models(self) -> dict[str, AIModel]:
-        """Get supported models."""
-        return self.models
-
-    @abstractmethod
-    async def process(
-        self,
-        messages: list[Message],
-        model_id: str,
-        files: list[str] | None = None,
-        mcp_servers: list[MCPServer] | None = None,
-        payload: dict[str, Any] | None = None,
-        user_id: int | None = None,
-    ) -> AsyncGenerator[Chunk, None]:
-        """Process messages."""
appkit_assistant-0.17.3.dist-info/RECORD (deleted)
@@ -1,39 +0,0 @@
-appkit_assistant/configuration.py,sha256=Wpo3EuGWQrV0WIQnAhkj19PzgGkJoAde5ky-MA7kJwg,429
-appkit_assistant/pages.py,sha256=gDvBweUO2WjrhP1RE5AAkjL1_S-givWr3CkkGZKws_E,471
-appkit_assistant/backend/file_manager.py,sha256=54SYphu6FsxbEYuMx8ohQiSAeY2gGDV1q3S6RZuNku0,3153
-appkit_assistant/backend/mcp_auth_service.py,sha256=lYQEe4yOZ48ear6dvcuOXsaOc6RClIBMsOOkV7SG5Aw,27768
-appkit_assistant/backend/model_manager.py,sha256=fmv3yP63LxDnne4vjT7IzETTI2aSxViC2FSUfHQajlk,4382
-appkit_assistant/backend/models.py,sha256=GCFWUUPsVspcFEv9naQ8n2bMU8FzMNrmqm3VgW1Fqfw,8346
-appkit_assistant/backend/processor.py,sha256=8m3pE2F45nUcT25N2C-9mNCnS7IB54DQM9WUTe4JPgk,2278
-appkit_assistant/backend/repositories.py,sha256=R-7kYdxg4RWQrTEOU4tbcOEhJA_FlesWrt65UpItRSU,5547
-appkit_assistant/backend/response_accumulator.py,sha256=BCK-Ut_Wmo7rhEqOb7QlqXO8TrtANjL80FbX_AvMl1Q,10056
-appkit_assistant/backend/system_prompt_cache.py,sha256=83OIyixeTb3HKOy3XIzPyTAE-G2JyqrfcG8xVeTS2Ls,5514
-appkit_assistant/backend/processors/claude_base.py,sha256=j0DhBn8EVAjW_bfCghXaEHyORO1raUNdQeemVWCKJlA,5376
-appkit_assistant/backend/processors/claude_responses_processor.py,sha256=-AZrF82-LT83ziswfvwjmokPxqrv14DtJ7BIy4_XGG8,33237
-appkit_assistant/backend/processors/gemini_base.py,sha256=ijCa8-_xdddD6ms_pkUjCj5kHZOoQ1wfw-7AhhXu6vo,2286
-appkit_assistant/backend/processors/gemini_responses_processor.py,sha256=IodjCh63w2sqceQEtPc4vZhlkqH12xKjT_3zPxH2dhc,27116
-appkit_assistant/backend/processors/lorem_ipsum_processor.py,sha256=PJj3WK1g2BFufzO1m13C6zDKhtzujs48-SI-oiIwg44,5119
-appkit_assistant/backend/processors/openai_base.py,sha256=TBQCZW7RxaPMag_DX_tpLyMpZ_PZ_KiO8_sFN49OCpk,4424
-appkit_assistant/backend/processors/openai_chat_completion_processor.py,sha256=zJdiuF8CGz7EaEUU0rcCnQmoAEJBn5hlsRQuXFKKSmQ,5026
-appkit_assistant/backend/processors/openai_responses_processor.py,sha256=jMiAE_ZPzdOIny-2W4aYeKbkRq4EpMirBlamWQ_xhm4,29749
-appkit_assistant/backend/processors/perplexity_processor.py,sha256=QD3bTawy-IKOV_qqjt0Pozubp-sAAqCACj_iokpI6ms,3523
-appkit_assistant/backend/services/thread_service.py,sha256=LpM8ZZHt1o4MYEKzH_XPURSi3qS6p3pAQA53tOE53MU,4663
-appkit_assistant/components/__init__.py,sha256=ptv0wShA4CHjgNpehlHqgoinCl-yyofjkV6rTTNIRHE,954
-appkit_assistant/components/composer.py,sha256=RMBZOTuLPkP90P_0v6BxOEy-I2jzC6mn60YT7zVChqI,7592
-appkit_assistant/components/composer_key_handler.py,sha256=KyZYyhxzFR8DH_7F_DrvTFNT6v5kG6JihlGTmCv2wv0,1028
-appkit_assistant/components/mcp_oauth.py,sha256=puLwxAhmF25BjnZMdJbKIfC6bFXK2D8LybOX0kD7Ri4,1737
-appkit_assistant/components/mcp_server_dialogs.py,sha256=afIImmhfrNyLmxDZBpCxHxvD8HKpDanIloLEC8dJgro,23444
-appkit_assistant/components/mcp_server_table.py,sha256=1dziN7hDDvE8Y3XcdIs0wUPv1H64kP9gRAEjgH9Yvzo,2323
-appkit_assistant/components/message.py,sha256=SJcwWfSw5XxiSHtKirJUun9jkx3-ATpZ0AHLv82KwXc,19824
-appkit_assistant/components/system_prompt_editor.py,sha256=REl33zFmcpYRe9kxvFrBRYg40dV4L4FtVC_3ibLsmrU,2940
-appkit_assistant/components/thread.py,sha256=-KK1vcEmITR_oDGLbQhIa2WxRCVxEqel12m4haS7y9w,8461
-appkit_assistant/components/threadlist.py,sha256=1xVakSTQYi5-wgED3fTJVggeIjL_fkthehce0wKUYtM,4896
-appkit_assistant/components/tools_modal.py,sha256=12iiAVahy3j4JwjGfRlegVEa4ePhGsEu7Bq92JLn1ZI,3353
-appkit_assistant/state/mcp_oauth_state.py,sha256=6MofExrbOOEl_YUcUOqcSTN3h7KAaERI5IdVfXdVUVs,7669
-appkit_assistant/state/mcp_server_state.py,sha256=3AFvy53xx_eLTxw-LfJklPTgq4Ohqu4xs1QlLs-kU4U,11387
-appkit_assistant/state/system_prompt_state.py,sha256=zdnYrTnl7EszALRiodu6pcuQUd2tmtPG1eJ10j_OotI,7705
-appkit_assistant/state/thread_list_state.py,sha256=DEOR5Nklj1qfYaxSRMXCZdZRv2iq2Jb37JSg739_wL4,10250
-appkit_assistant/state/thread_state.py,sha256=bZpUIr_EEUmpLI9a79GYBqbi99JbV6-LGd51j3mrzF0,39962
-appkit_assistant-0.17.3.dist-info/METADATA,sha256=C9aWKzo40NMooCUqzY7WtePCW3bxTPricqxUFzI8B4U,9498
-appkit_assistant-0.17.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-appkit_assistant-0.17.3.dist-info/RECORD,,
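Each RECORD entry has the form path,digest,size, where the digest is the unpadded urlsafe-base64 sha256 of the installed file (the standard wheel RECORD format) and the RECORD file itself is listed with empty digest and size fields. A minimal sketch of recomputing one digest locally; record_digest is a hypothetical helper name:

import base64
import hashlib
from pathlib import Path


def record_digest(path: str) -> str:
    """Return a RECORD-style digest: sha256=<urlsafe base64, padding stripped>."""
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# e.g. record_digest("appkit_assistant/configuration.py") run against the 0.17.3
# wheel contents should reproduce the entry above:
# "sha256=Wpo3EuGWQrV0WIQnAhkj19PzgGkJoAde5ky-MA7kJwg"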
appkit_assistant/backend/{file_manager.py → services/file_manager.py} (renamed, file without changes)
{appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/WHEEL (file without changes)