appkit-assistant 0.17.3__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. appkit_assistant/backend/{models.py → database/models.py} +32 -132
  2. appkit_assistant/backend/{repositories.py → database/repositories.py} +93 -1
  3. appkit_assistant/backend/model_manager.py +5 -5
  4. appkit_assistant/backend/models/__init__.py +28 -0
  5. appkit_assistant/backend/models/anthropic.py +31 -0
  6. appkit_assistant/backend/models/google.py +27 -0
  7. appkit_assistant/backend/models/openai.py +50 -0
  8. appkit_assistant/backend/models/perplexity.py +56 -0
  9. appkit_assistant/backend/processors/__init__.py +29 -0
  10. appkit_assistant/backend/processors/claude_responses_processor.py +205 -387
  11. appkit_assistant/backend/processors/gemini_responses_processor.py +231 -299
  12. appkit_assistant/backend/processors/lorem_ipsum_processor.py +6 -4
  13. appkit_assistant/backend/processors/mcp_mixin.py +297 -0
  14. appkit_assistant/backend/processors/openai_base.py +11 -125
  15. appkit_assistant/backend/processors/openai_chat_completion_processor.py +5 -3
  16. appkit_assistant/backend/processors/openai_responses_processor.py +480 -402
  17. appkit_assistant/backend/processors/perplexity_processor.py +156 -79
  18. appkit_assistant/backend/{processor.py → processors/processor_base.py} +7 -2
  19. appkit_assistant/backend/processors/streaming_base.py +188 -0
  20. appkit_assistant/backend/schemas.py +138 -0
  21. appkit_assistant/backend/services/auth_error_detector.py +99 -0
  22. appkit_assistant/backend/services/chunk_factory.py +273 -0
  23. appkit_assistant/backend/services/citation_handler.py +292 -0
  24. appkit_assistant/backend/services/file_cleanup_service.py +316 -0
  25. appkit_assistant/backend/services/file_upload_service.py +903 -0
  26. appkit_assistant/backend/services/file_validation.py +138 -0
  27. appkit_assistant/backend/{mcp_auth_service.py → services/mcp_auth_service.py} +4 -2
  28. appkit_assistant/backend/services/mcp_token_service.py +61 -0
  29. appkit_assistant/backend/services/message_converter.py +289 -0
  30. appkit_assistant/backend/services/openai_client_service.py +120 -0
  31. appkit_assistant/backend/{response_accumulator.py → services/response_accumulator.py} +163 -1
  32. appkit_assistant/backend/services/system_prompt_builder.py +89 -0
  33. appkit_assistant/backend/services/thread_service.py +5 -3
  34. appkit_assistant/backend/system_prompt_cache.py +3 -3
  35. appkit_assistant/components/__init__.py +8 -4
  36. appkit_assistant/components/composer.py +59 -24
  37. appkit_assistant/components/file_manager.py +623 -0
  38. appkit_assistant/components/mcp_server_dialogs.py +12 -20
  39. appkit_assistant/components/mcp_server_table.py +12 -2
  40. appkit_assistant/components/message.py +119 -2
  41. appkit_assistant/components/thread.py +1 -1
  42. appkit_assistant/components/threadlist.py +4 -2
  43. appkit_assistant/components/tools_modal.py +37 -20
  44. appkit_assistant/configuration.py +12 -0
  45. appkit_assistant/state/file_manager_state.py +697 -0
  46. appkit_assistant/state/mcp_oauth_state.py +3 -3
  47. appkit_assistant/state/mcp_server_state.py +47 -2
  48. appkit_assistant/state/system_prompt_state.py +1 -1
  49. appkit_assistant/state/thread_list_state.py +99 -5
  50. appkit_assistant/state/thread_state.py +88 -9
  51. {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/METADATA +8 -6
  52. appkit_assistant-1.0.0.dist-info/RECORD +58 -0
  53. appkit_assistant/backend/processors/claude_base.py +0 -178
  54. appkit_assistant/backend/processors/gemini_base.py +0 -84
  55. appkit_assistant-0.17.3.dist-info/RECORD +0 -39
  56. /appkit_assistant/backend/{file_manager.py → services/file_manager.py} +0 -0
  57. {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/WHEEL +0 -0
appkit_assistant/backend/processors/lorem_ipsum_processor.py
@@ -8,14 +8,16 @@ import random
 from collections.abc import AsyncGenerator
 from typing import Any

-from appkit_assistant.backend.models import (
+from appkit_assistant.backend.database.models import (
+    MCPServer,
+)
+from appkit_assistant.backend.processors.processor_base import ProcessorBase
+from appkit_assistant.backend.schemas import (
     AIModel,
     Chunk,
     ChunkType,
-    MCPServer,
     Message,
 )
-from appkit_assistant.backend.processor import Processor

 logger = logging.getLogger(__name__)

@@ -45,7 +47,7 @@ LOREM_MODELS = {
 }


-class LoremIpsumProcessor(Processor):
+class LoremIpsumProcessor(ProcessorBase):
     """Processor that generates Lorem Ipsum text responses."""

     def __init__(self, models: dict[str, AIModel] = LOREM_MODELS) -> None:
appkit_assistant/backend/processors/mcp_mixin.py (new file)
@@ -0,0 +1,297 @@
+"""MCP Capabilities Mixin for processors.
+
+Provides MCP (Model Context Protocol) authentication and tool configuration
+capabilities that can be composed with processor classes.
+"""
+
+import json
+import logging
+from collections.abc import AsyncGenerator
+from typing import Any
+
+from appkit_assistant.backend.database.models import (
+    AssistantMCPUserToken,
+    MCPServer,
+)
+from appkit_assistant.backend.processors.processor_base import mcp_oauth_redirect_uri
+from appkit_assistant.backend.schemas import (
+    Chunk,
+    ChunkType,
+    MCPAuthType,
+)
+from appkit_assistant.backend.services.mcp_auth_service import MCPAuthService
+from appkit_assistant.backend.services.mcp_token_service import MCPTokenService
+from appkit_commons.database.session import get_session_manager
+
+logger = logging.getLogger(__name__)
+
+
+class MCPCapabilities:
+    """Mixin providing MCP authentication and tool configuration.
+
+    Add this to a processor class to enable MCP server support with:
+    - OAuth token management
+    - Auth-required chunk creation
+    - Header parsing
+    - Server configuration
+
+    Usage:
+        class MyProcessor(StreamingProcessorBase, MCPCapabilities):
+            def __init__(self, ...):
+                StreamingProcessorBase.__init__(self, ...)
+                MCPCapabilities.__init__(self, oauth_redirect_uri, processor_name)
+    """
+
+    def __init__(
+        self,
+        oauth_redirect_uri: str | None = None,
+        processor_name: str = "unknown",
+    ) -> None:
+        """Initialize MCP capabilities.
+
+        Args:
+            oauth_redirect_uri: OAuth redirect URI for MCP servers
+            processor_name: Name for chunk metadata and logging
+        """
+        redirect_uri = oauth_redirect_uri or mcp_oauth_redirect_uri()
+        self._mcp_auth_service = MCPAuthService(redirect_uri=redirect_uri)
+        self._mcp_token_service = MCPTokenService(self._mcp_auth_service)
+        self._pending_auth_servers: list[MCPServer] = []
+        self._mcp_processor_name = processor_name
+        self._mcp_current_user_id: int | None = None
+
+        logger.debug("Using redirect URI for MCP OAuth: %s", redirect_uri)
+
+    @property
+    def mcp_processor_name(self) -> str:
+        """Get the processor name for MCP operations."""
+        return self._mcp_processor_name
+
+    @property
+    def current_user_id(self) -> int | None:
+        """Get the current user ID."""
+        return self._mcp_current_user_id
+
+    @current_user_id.setter
+    def current_user_id(self, value: int | None) -> None:
+        """Set the current user ID."""
+        self._mcp_current_user_id = value
+
+    @property
+    def mcp_auth_service(self) -> MCPAuthService:
+        """Get the MCP auth service."""
+        return self._mcp_auth_service
+
+    @property
+    def mcp_token_service(self) -> MCPTokenService:
+        """Get the MCP token service."""
+        return self._mcp_token_service
+
+    @property
+    def pending_auth_servers(self) -> list[MCPServer]:
+        """Get the list of servers pending authentication."""
+        return self._pending_auth_servers
+
+    def clear_pending_auth_servers(self) -> None:
+        """Clear the pending auth servers list."""
+        self._pending_auth_servers = []
+
+    def add_pending_auth_server(self, server: MCPServer) -> None:
+        """Add a server to the pending auth list.
+
+        Args:
+            server: The MCP server requiring authentication
+        """
+        if server not in self._pending_auth_servers:
+            self._pending_auth_servers.append(server)
+
+    async def get_valid_token(
+        self,
+        server: MCPServer,
+        user_id: int,
+    ) -> AssistantMCPUserToken | None:
+        """Get a valid OAuth token for a server.
+
+        Args:
+            server: The MCP server
+            user_id: The user's ID
+
+        Returns:
+            A valid token or None
+        """
+        return await self._mcp_token_service.get_valid_token(server, user_id)
+
+    def parse_mcp_headers(self, server: MCPServer) -> dict[str, str]:
+        """Parse headers from server configuration.
+
+        Args:
+            server: The MCP server configuration
+
+        Returns:
+            Dictionary of HTTP headers
+        """
+        if not server.headers or server.headers == "{}":
+            return {}
+
+        try:
+            headers_dict = json.loads(server.headers)
+            return dict(headers_dict)
+        except json.JSONDecodeError:
+            logger.warning("Invalid headers JSON for server %s", server.name)
+            return {}
+
+    def parse_mcp_headers_with_auth(
+        self,
+        server: MCPServer,
+    ) -> tuple[dict[str, str], str | None]:
+        """Parse headers and extract auth token separately.
+
+        Used by Claude which sends auth token separately from headers.
+
+        Args:
+            server: The MCP server configuration
+
+        Returns:
+            Tuple of (headers_without_auth, auth_token)
+        """
+        headers = self.parse_mcp_headers(server)
+        auth_token: str | None = None
+
+        # Extract Bearer token from Authorization header
+        auth_header = headers.pop("Authorization", "")
+        if auth_header.startswith("Bearer "):
+            auth_token = auth_header[7:]  # Remove "Bearer " prefix
+        elif auth_header:
+            auth_token = auth_header
+
+        return headers, auth_token
+
+    async def create_auth_required_chunk(
+        self,
+        server: MCPServer,
+        user_id: int | None = None,
+        processor_name: str | None = None,
+    ) -> Chunk:
+        """Create an AUTH_REQUIRED chunk for a server.
+
+        Builds the authorization URL and returns a chunk that signals
+        the UI to show an authentication dialog.
+
+        Args:
+            server: The MCP server requiring authentication
+            user_id: The current user's ID (uses internal state if None)
+            processor_name: Name of the processor (uses internal state if None)
+
+        Returns:
+            An AUTH_REQUIRED Chunk with auth URL
+        """
+        effective_user_id = (
+            user_id if user_id is not None else self._mcp_current_user_id
+        )
+        effective_processor = processor_name or self._mcp_processor_name
+        auth_url = ""
+        state = ""
+
+        try:
+            with get_session_manager().session() as session:
+                auth_service = self._mcp_auth_service
+                (
+                    auth_url,
+                    state,
+                ) = await auth_service.build_authorization_url_with_registration(
+                    server,
+                    session=session,
+                    user_id=effective_user_id,
+                )
+                logger.info(
+                    "Built auth URL for server %s, state=%s, url=%s",
+                    server.name,
+                    state,
+                    auth_url[:100] if auth_url else "None",
+                )
+        except Exception as e:
+            logger.error("Cannot build auth URL for server %s: %s", server.name, str(e))
+
+        return Chunk(
+            type=ChunkType.AUTH_REQUIRED,
+            text=f"{server.name} benötigt Ihre Autorisierung",
+            chunk_metadata={
+                "server_id": str(server.id) if server.id else "",
+                "server_name": server.name,
+                "auth_url": auth_url,
+                "state": state,
+                "processor": effective_processor,
+            },
+        )
+
+    async def yield_pending_auth_chunks(
+        self,
+    ) -> AsyncGenerator[Chunk, None]:
+        """Yield auth chunks for all pending servers.
+
+        Generator that yields AUTH_REQUIRED chunks for each server
+        that needs authentication. Uses internal current_user_id and
+        mcp_processor_name state.
+
+        Yields:
+            AUTH_REQUIRED Chunk for each pending server
+        """
+        logger.debug(
+            "Processing pending auth servers: %d", len(self._pending_auth_servers)
+        )
+        for server in self._pending_auth_servers:
+            logger.debug("Yielding auth chunk for server: %s", server.name)
+            yield await self.create_auth_required_chunk(server)
+
+    async def configure_mcp_servers_with_tokens(
+        self,
+        servers: list[MCPServer] | None,
+        user_id: int | None,
+    ) -> tuple[list[dict[str, Any]], str]:
+        """Configure MCP servers with OAuth tokens.
+
+        For each server:
+        1. Parse headers
+        2. If OAuth required, inject token or mark for auth
+
+        Args:
+            servers: List of MCP server configurations
+            user_id: The current user's ID
+
+        Returns:
+            Tuple of (server_configs, mcp_prompt)
+        """
+        if not servers:
+            return [], ""
+
+        server_configs = []
+        prompts = []
+
+        for server in servers:
+            headers = self.parse_mcp_headers(server)
+
+            # Handle OAuth servers
+            if server.auth_type == MCPAuthType.OAUTH_DISCOVERY and user_id is not None:
+                token = await self.get_valid_token(server, user_id)
+                if token:
+                    headers["Authorization"] = f"Bearer {token.access_token}"
+                    logger.debug("Injected OAuth token for server %s", server.name)
+                else:
+                    self.add_pending_auth_server(server)
+                    logger.debug(
+                        "No valid token for OAuth server %s, auth may be required",
+                        server.name,
+                    )
+
+            server_configs.append(
+                {
+                    "name": server.name,
+                    "url": server.url,
+                    "headers": headers,
+                }
+            )
+
+            if server.prompt:
+                prompts.append(f"- {server.prompt}")
+
+        return server_configs, "\n".join(prompts) if prompts else ""
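To make the composition pattern from the class docstring concrete, here is a minimal usage sketch of a processor driving the mixin. It is illustrative only: StreamingProcessorBase is taken from the docstring above (its constructor signature is assumed), the stream() method name and signature are hypothetical, and only the MCPCapabilities calls mirror the mixin shown above.

# Hypothetical usage sketch -- not part of the package. StreamingProcessorBase
# and the stream() signature are assumptions; the MCPCapabilities calls mirror
# the mixin above.
from collections.abc import AsyncGenerator

from appkit_assistant.backend.database.models import MCPServer
from appkit_assistant.backend.processors.mcp_mixin import MCPCapabilities
from appkit_assistant.backend.processors.streaming_base import StreamingProcessorBase
from appkit_assistant.backend.schemas import Chunk


class ExampleMCPProcessor(StreamingProcessorBase, MCPCapabilities):
    def __init__(self, oauth_redirect_uri: str | None = None) -> None:
        StreamingProcessorBase.__init__(self)  # assumed constructor signature
        MCPCapabilities.__init__(self, oauth_redirect_uri, processor_name="example")

    async def stream(
        self,
        servers: list[MCPServer] | None,
        user_id: int | None,
    ) -> AsyncGenerator[Chunk, None]:
        self.current_user_id = user_id
        self.clear_pending_auth_servers()

        # Build tool configs; OAuth servers without a valid token are queued.
        server_configs, mcp_prompt = await self.configure_mcp_servers_with_tokens(
            servers, user_id
        )

        # Surface AUTH_REQUIRED chunks so the UI can open the auth dialog.
        async for chunk in self.yield_pending_auth_chunks():
            yield chunk

        # ...pass server_configs and mcp_prompt to the model request here...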
appkit_assistant/backend/processors/openai_base.py
@@ -5,138 +5,24 @@ OpenAI processor for generating AI responses using OpenAI's API.
 import logging
 from abc import ABC, abstractmethod
 from collections.abc import AsyncGenerator
-from typing import Any, Final
+from typing import Any

-from openai import AsyncAzureOpenAI, AsyncOpenAI
+from openai import AsyncOpenAI

-from appkit_assistant.backend.models import (
+from appkit_assistant.backend.database.models import (
+    MCPServer,
+)
+from appkit_assistant.backend.processors.processor_base import ProcessorBase
+from appkit_assistant.backend.schemas import (
     AIModel,
     Chunk,
-    MCPServer,
     Message,
 )
-from appkit_assistant.backend.processor import Processor

 logger = logging.getLogger(__name__)

-DEFAULT: Final = AIModel(
-    id="default",
-    text="Default (GPT 4.1 Mini)",
-    icon="avvia_intelligence",
-    model="default",
-    stream=True,
-)
-
-GPT_4o: Final = AIModel(
-    id="gpt-4o",
-    text="GPT 4o",
-    icon="openai",
-    model="gpt-4o",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-)
-
-GPT_4_1: Final = AIModel(
-    id="gpt-4.1",
-    text="GPT-4.1",
-    icon="openai",
-    model="gpt-4.1",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-)
-
-O3: Final = AIModel(
-    id="o3",
-    text="o3 Reasoning",
-    icon="openai",
-    model="o3",
-    temperature=1,
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-)
-
-O4_MINI: Final = AIModel(
-    id="o4-mini",
-    text="o4 Mini Reasoning",
-    icon="openai",
-    model="o4-mini",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5: Final = AIModel(
-    id="gpt-5",
-    text="GPT 5",
-    icon="openai",
-    model="gpt-5",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5_1: Final = AIModel(
-    id="gpt-5.1",
-    text="GPT 5.1",
-    icon="openai",
-    model="gpt-5.1",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5_2: Final = AIModel(
-    id="gpt-5.2",
-    text="GPT 5.2",
-    icon="openai",
-    model="gpt-5.2",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5_MINI: Final = AIModel(
-    id="gpt-5-mini",
-    text="GPT 5 Mini",
-    icon="openai",
-    model="gpt-5-mini",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5_1_MINI: Final = AIModel(
-    id="gpt-5.1-mini",
-    text="GPT 5.1 Mini",
-    icon="openai",
-    model="gpt-5.1-mini",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-
-GPT_5_NANO: Final = AIModel(
-    id="gpt-5-nano",
-    text="GPT 5 Nano",
-    icon="openai",
-    model="gpt-5-nano",
-    stream=True,
-    supports_attachments=False,
-    supports_tools=True,
-    temperature=1,
-)
-

-class BaseOpenAIProcessor(Processor, ABC):
+class BaseOpenAIProcessor(ProcessorBase, ABC):
     """Base class for OpenAI processors with common initialization and utilities."""

     def __init__(
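Note that the model constants deleted above do not disappear from the package: the file list adds appkit_assistant/backend/models/openai.py (+50) and models/__init__.py (+28), so the OpenAI catalog appears to move there, presumably in a more compact form. A hypothetical sketch of what consuming code might look like after the move (the module path comes from the file list; the exported names are assumptions based on the deleted constants):

# Hypothetical sketch -- assumes models/openai.py re-defines the AIModel
# catalog removed from openai_base.py under the same constant names.
from appkit_assistant.backend.models.openai import GPT_4_1, GPT_5, O3, O4_MINI
from appkit_assistant.backend.schemas import AIModel

# Index the relocated catalog by model id, as the old module-level dicts did.
OPENAI_MODELS: dict[str, AIModel] = {
    model.id: model for model in (GPT_4_1, GPT_5, O3, O4_MINI)
}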
@@ -161,10 +47,10 @@ class BaseOpenAIProcessor(Processor, ABC):
         self.client = None

         if self.api_key and self.base_url and is_azure:
-            self.client = AsyncAzureOpenAI(
+            self.client = AsyncOpenAI(
                 api_key=self.api_key,
-                azure_endpoint=self.base_url,
-                api_version="2025-04-01-preview",
+                base_url=f"{self.base_url}/openai/v1",
+                default_query={"api-version": "preview"},
             )
         elif self.api_key and self.base_url:
             self.client = AsyncOpenAI(api_key=self.api_key, base_url=self.base_url)
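The Azure branch above drops the dedicated AsyncAzureOpenAI client and its pinned api_version in favor of the standard AsyncOpenAI client pointed at the endpoint's /openai/v1 path. Roughly, the constructed client now looks like this (a sketch mirroring the hunk above, with placeholder endpoint and key):

# Sketch of the new Azure wiring shown in the hunk above; the endpoint and
# key values are placeholders.
from openai import AsyncOpenAI

azure_endpoint = "https://example-resource.openai.azure.com"  # placeholder
client = AsyncOpenAI(
    api_key="<azure-api-key>",  # placeholder
    base_url=f"{azure_endpoint}/openai/v1",
    default_query={"api-version": "preview"},
)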
appkit_assistant/backend/processors/openai_chat_completion_processor.py
@@ -6,14 +6,16 @@ from typing import Any
 from openai import AsyncStream
 from openai.types.chat import ChatCompletionMessageParam

-from appkit_assistant.backend.models import (
+from appkit_assistant.backend.database.models import (
+    MCPServer,
+)
+from appkit_assistant.backend.processors.openai_base import BaseOpenAIProcessor
+from appkit_assistant.backend.schemas import (
     Chunk,
     ChunkType,
-    MCPServer,
     Message,
     MessageType,
 )
-from appkit_assistant.backend.processors.openai_base import BaseOpenAIProcessor

 logger = logging.getLogger(__name__)
