appkit-assistant 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. appkit_assistant/backend/database/models.py +1 -0
  2. appkit_assistant/backend/database/repositories.py +34 -0
  3. appkit_assistant/backend/models/anthropic.py +8 -0
  4. appkit_assistant/backend/models/google.py +8 -0
  5. appkit_assistant/backend/models/openai.py +12 -0
  6. appkit_assistant/backend/models/perplexity.py +6 -0
  7. appkit_assistant/backend/processors/lorem_ipsum_processor.py +3 -0
  8. appkit_assistant/backend/processors/openai_responses_processor.py +2 -1
  9. appkit_assistant/backend/services/auth_error_detector.py +1 -1
  10. appkit_assistant/backend/services/file_cleanup_service.py +42 -29
  11. appkit_assistant/backend/services/file_upload_service.py +99 -63
  12. appkit_assistant/backend/services/file_validation.py +1 -1
  13. appkit_assistant/backend/services/system_prompt_builder.py +1 -1
  14. appkit_assistant/components/composer.py +5 -9
  15. appkit_assistant/components/file_manager.py +2 -0
  16. appkit_assistant/components/mcp_server_dialogs.py +46 -0
  17. appkit_assistant/components/mcp_server_table.py +29 -3
  18. appkit_assistant/components/thread.py +10 -1
  19. appkit_assistant/roles.py +42 -0
  20. appkit_assistant/state/file_manager_state.py +53 -8
  21. appkit_assistant/state/mcp_server_state.py +15 -0
  22. appkit_assistant/state/thread_list_state.py +3 -1
  23. appkit_assistant/state/thread_state.py +23 -4
  24. {appkit_assistant-1.0.3.dist-info → appkit_assistant-1.0.5.dist-info}/METADATA +2 -1
  25. {appkit_assistant-1.0.3.dist-info → appkit_assistant-1.0.5.dist-info}/RECORD +26 -25
  26. {appkit_assistant-1.0.3.dist-info → appkit_assistant-1.0.5.dist-info}/WHEEL +0 -0
@@ -67,6 +67,7 @@ class MCPServer(rx.Model, table=True):
67
67
  default=None, sa_column=Column(DateTime(timezone=True), nullable=True)
68
68
  )
69
69
  active: bool = Field(default=True, nullable=False)
70
+ required_role: str | None = Field(default=None, nullable=True)
70
71
 
71
72
 
72
73
  class SystemPrompt(rx.Model, table=True):
@@ -168,6 +168,40 @@ class ThreadRepository(BaseRepository[AssistantThread, AsyncSession]):
168
168
  result = await session.execute(stmt)
169
169
  return list(result.scalars().all())
170
170
 
171
+ async def find_unique_vector_store_ids(self, session: AsyncSession) -> list[str]:
172
+ """Get unique vector store IDs from all threads.
173
+
174
+ Returns:
175
+ List of unique vector store IDs (excluding None/empty).
176
+ """
177
+ stmt = select(AssistantThread.vector_store_id).distinct()
178
+ result = await session.execute(stmt)
179
+ return [row[0] for row in result.all() if row[0]]
180
+
181
+ async def clear_vector_store_id(
182
+ self, session: AsyncSession, vector_store_id: str
183
+ ) -> int:
184
+ """Clear vector_store_id from all threads referencing the given store.
185
+
186
+ Args:
187
+ vector_store_id: The vector store ID to clear from threads.
188
+
189
+ Returns:
190
+ Number of threads updated.
191
+ """
192
+ stmt = select(AssistantThread).where(
193
+ AssistantThread.vector_store_id == vector_store_id
194
+ )
195
+ result = await session.execute(stmt)
196
+ threads = list(result.scalars().all())
197
+
198
+ for thread in threads:
199
+ thread.vector_store_id = None
200
+ session.add(thread)
201
+
202
+ await session.flush()
203
+ return len(threads)
204
+
171
205
 
172
206
  class FileUploadRepository(BaseRepository[AssistantFileUpload, AsyncSession]):
173
207
  """Repository class for file upload database operations."""
@@ -7,6 +7,10 @@ from typing import Final
7
7
  from appkit_assistant.backend.schemas import (
8
8
  AIModel,
9
9
  )
10
+ from appkit_assistant.roles import (
11
+ ASSISTANT_ADVANCED_MODELS_ROLE,
12
+ ASSISTANT_BASIC_MODELS_ROLE,
13
+ )
10
14
 
11
15
  CLAUDE_HAIKU_4_5: Final = AIModel(
12
16
  id="claude-haiku-4.5",
@@ -17,6 +21,8 @@ CLAUDE_HAIKU_4_5: Final = AIModel(
17
21
  supports_attachments=False,
18
22
  supports_tools=True,
19
23
  temperature=1.0,
24
+ keywords=["haiku", "claude"],
25
+ requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
20
26
  )
21
27
 
22
28
  CLAUDE_SONNET_4_5: Final = AIModel(
@@ -28,4 +34,6 @@ CLAUDE_SONNET_4_5: Final = AIModel(
28
34
  supports_attachments=False,
29
35
  supports_tools=True,
30
36
  temperature=1.0,
37
+ keywords=["sonnet", "claude"],
38
+ requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
31
39
  )
@@ -5,6 +5,10 @@ Gemini model definitions for Google's GenAI API.
5
5
  from typing import Final
6
6
 
7
7
  from appkit_assistant.backend.schemas import AIModel
8
+ from appkit_assistant.roles import (
9
+ ASSISTANT_ADVANCED_MODELS_ROLE,
10
+ ASSISTANT_BASIC_MODELS_ROLE,
11
+ )
8
12
 
9
13
  GEMINI_3_PRO: Final = AIModel(
10
14
  id="gemini-3-pro-preview",
@@ -14,6 +18,8 @@ GEMINI_3_PRO: Final = AIModel(
14
18
  stream=True,
15
19
  supports_attachments=False,
16
20
  supports_tools=True,
21
+ keywords=["pro", "gemini"],
22
+ requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
17
23
  )
18
24
 
19
25
  GEMINI_3_FLASH: Final = AIModel(
@@ -24,4 +30,6 @@ GEMINI_3_FLASH: Final = AIModel(
24
30
  stream=True,
25
31
  supports_attachments=False,
26
32
  supports_tools=True,
33
+ keywords=["flash", "gemini"],
34
+ requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
27
35
  )
@@ -1,6 +1,10 @@
1
1
  from typing import Final
2
2
 
3
3
  from appkit_assistant.backend.schemas import AIModel
4
+ from appkit_assistant.roles import (
5
+ ASSISTANT_ADVANCED_MODELS_ROLE,
6
+ ASSISTANT_BASIC_MODELS_ROLE,
7
+ )
4
8
 
5
9
  O3: Final = AIModel(
6
10
  id="o3",
@@ -11,6 +15,8 @@ O3: Final = AIModel(
11
15
  stream=True,
12
16
  supports_attachments=False,
13
17
  supports_tools=True,
18
+ keywords=["reasoning", "o3"],
19
+ requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
14
20
  )
15
21
 
16
22
  GPT_5_MINI: Final = AIModel(
@@ -23,6 +29,8 @@ GPT_5_MINI: Final = AIModel(
23
29
  supports_tools=True,
24
30
  supports_search=True,
25
31
  temperature=1,
32
+ keywords=["gpt-5", "mini"],
33
+ requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
26
34
  )
27
35
 
28
36
  GPT_5_1: Final = AIModel(
@@ -35,6 +43,8 @@ GPT_5_1: Final = AIModel(
35
43
  supports_tools=True,
36
44
  supports_search=True,
37
45
  temperature=1,
46
+ keywords=["gpt-5", "5.1"],
47
+ requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
38
48
  )
39
49
 
40
50
  GPT_5_2: Final = AIModel(
@@ -47,4 +57,6 @@ GPT_5_2: Final = AIModel(
47
57
  supports_tools=True,
48
58
  supports_search=True,
49
59
  temperature=1,
60
+ keywords=["gpt-5", "5.2"],
61
+ requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
50
62
  )
@@ -1,6 +1,7 @@
1
1
  import enum
2
2
 
3
3
  from appkit_assistant.backend.schemas import AIModel
4
+ from appkit_assistant.roles import ASSISTANT_PERPLEXITY_MODEL_ROLE
4
5
 
5
6
 
6
7
  class ContextSize(enum.StrEnum):
@@ -24,6 +25,8 @@ SONAR = PerplexityAIModel(
24
25
  icon="perplexity",
25
26
  model="sonar",
26
27
  stream=True,
28
+ keywords=["sonar", "perplexity"],
29
+ requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
27
30
  )
28
31
 
29
32
  SONAR_PRO = PerplexityAIModel(
@@ -33,6 +36,7 @@ SONAR_PRO = PerplexityAIModel(
33
36
  model="sonar-pro",
34
37
  stream=True,
35
38
  keywords=["sonar", "perplexity"],
39
+ requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
36
40
  )
37
41
 
38
42
  SONAR_DEEP_RESEARCH = PerplexityAIModel(
@@ -43,6 +47,7 @@ SONAR_DEEP_RESEARCH = PerplexityAIModel(
43
47
  search_context_size=ContextSize.HIGH,
44
48
  stream=True,
45
49
  keywords=["reasoning", "deep", "research", "perplexity"],
50
+ requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
46
51
  )
47
52
 
48
53
  SONAR_REASONING = PerplexityAIModel(
@@ -53,4 +58,5 @@ SONAR_REASONING = PerplexityAIModel(
53
58
  search_context_size=ContextSize.HIGH,
54
59
  stream=True,
55
60
  keywords=["reasoning", "perplexity"],
61
+ requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
56
62
  )
@@ -18,6 +18,7 @@ from appkit_assistant.backend.schemas import (
18
18
  ChunkType,
19
19
  Message,
20
20
  )
21
+ from appkit_assistant.roles import ASSISTANT_BASIC_MODELS_ROLE
21
22
 
22
23
  logger = logging.getLogger(__name__)
23
24
 
@@ -43,6 +44,8 @@ LOREM_MODELS = {
43
44
  stream=True,
44
45
  supports_attachments=True,
45
46
  supports_tools=True,
47
+ keywords=["lorem", "ipsum", "short"],
48
+ requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
46
49
  )
47
50
  }
48
51
 
@@ -462,7 +462,8 @@ class OpenAIResponsesProcessor(StreamingProcessorBase, MCPCapabilities):
462
462
  reasoning_session=self.current_reasoning_session,
463
463
  )
464
464
 
465
- # file_search_call / web_search_call done events are handled in _handle_search_events
465
+ # file_search_call / web_search_call done events are handled in
466
+ # _handle_search_events
466
467
  if item.type in ("file_search_call", "web_search_call"):
467
468
  return None
468
469
 
@@ -93,7 +93,7 @@ def get_auth_error_detector() -> AuthErrorDetector:
93
93
  Returns:
94
94
  The AuthErrorDetector instance
95
95
  """
96
- global _auth_error_detector
96
+ global _auth_error_detector # noqa: PLW0603
97
97
  if _auth_error_detector is None:
98
98
  _auth_error_detector = AuthErrorDetector()
99
99
  return _auth_error_detector
@@ -14,11 +14,10 @@ from typing import Any
14
14
  from apscheduler.schedulers.asyncio import AsyncIOScheduler
15
15
  from apscheduler.triggers.interval import IntervalTrigger
16
16
  from openai import AsyncOpenAI, NotFoundError
17
- from sqlalchemy import select
18
17
 
19
- from appkit_assistant.backend.database.models import (
20
- AssistantFileUpload,
21
- AssistantThread,
18
+ from appkit_assistant.backend.database.repositories import (
19
+ file_upload_repo,
20
+ thread_repo,
22
21
  )
23
22
  from appkit_assistant.backend.services.file_upload_service import FileUploadService
24
23
  from appkit_assistant.backend.services.openai_client_service import (
@@ -76,6 +75,8 @@ class FileCleanupService:
76
75
  "vector_stores_checked": 0,
77
76
  "vector_stores_expired": 0,
78
77
  "vector_stores_deleted": 0,
78
+ "files_found": 0,
79
+ "files_deleted": 0,
79
80
  "threads_updated": 0,
80
81
  "current_vector_store": None,
81
82
  "total_vector_stores": 0,
@@ -83,12 +84,19 @@ class FileCleanupService:
83
84
  }
84
85
 
85
86
  try:
86
- # Get all unique vector store IDs from file uploads
87
+ # Get all unique vector store IDs from BOTH file uploads AND threads
87
88
  async with get_asyncdb_session() as session:
88
- result = await session.execute(
89
- select(AssistantFileUpload.vector_store_id).distinct()
89
+ # Vector stores from file uploads
90
+ file_stores = await file_upload_repo.find_unique_vector_stores(session)
91
+ file_store_ids = {store_id for store_id, _ in file_stores if store_id}
92
+
93
+ # Vector stores from threads (may have orphaned references)
94
+ thread_store_ids = set(
95
+ await thread_repo.find_unique_vector_store_ids(session)
90
96
  )
91
- vector_store_ids = [row[0] for row in result.all() if row[0]]
97
+
98
+ # Combine both sets
99
+ vector_store_ids = list(file_store_ids | thread_store_ids)
92
100
 
93
101
  stats["total_vector_stores"] = len(vector_store_ids)
94
102
  stats["status"] = "checking"
@@ -111,9 +119,11 @@ class FileCleanupService:
111
119
  yield stats.copy()
112
120
 
113
121
  # Delegate cleanup to FileUploadService
114
- deleted = await self._file_upload_service.delete_vector_store(vs_id)
115
- if deleted:
122
+ result = await self._file_upload_service.delete_vector_store(vs_id)
123
+ if result["deleted"]:
116
124
  stats["vector_stores_deleted"] += 1
125
+ stats["files_found"] += result["files_found"]
126
+ stats["files_deleted"] += result["files_deleted"]
117
127
  # Clear vector_store_id from associated threads
118
128
  threads_updated = await self._clear_thread_vector_store_ids(vs_id)
119
129
  stats["threads_updated"] += threads_updated
@@ -142,12 +152,24 @@ class FileCleanupService:
142
152
  True if the vector store is expired/deleted, False otherwise.
143
153
  """
144
154
  try:
145
- await self._client.vector_stores.retrieve(vector_store_id=vector_store_id)
155
+ vector_store = await self._client.vector_stores.retrieve(
156
+ vector_store_id=vector_store_id
157
+ )
158
+ # Check if the vector store has expired status
159
+ if vector_store.status == "expired":
160
+ logger.info(
161
+ "Vector store %s has expired status",
162
+ vector_store_id,
163
+ )
164
+ return True
165
+ return False
146
166
  except NotFoundError:
167
+ logger.info(
168
+ "Vector store %s not found (deleted)",
169
+ vector_store_id,
170
+ )
147
171
  return True
148
172
 
149
- return False
150
-
151
173
  async def _clear_thread_vector_store_ids(self, vector_store_id: str) -> int:
152
174
  """Clear vector_store_id from all threads associated with the store.
153
175
 
@@ -157,25 +179,16 @@ class FileCleanupService:
157
179
  Returns:
158
180
  Number of threads updated.
159
181
  """
160
- updated_count = 0
161
182
  async with get_asyncdb_session() as session:
162
- thread_result = await session.execute(
163
- select(AssistantThread).where(
164
- AssistantThread.vector_store_id == vector_store_id
165
- )
183
+ updated_count = await thread_repo.clear_vector_store_id(
184
+ session, vector_store_id
166
185
  )
167
- threads = list(thread_result.scalars().all())
168
-
169
- for thread in threads:
170
- thread.vector_store_id = None
171
- session.add(thread)
172
- updated_count += 1
173
- logger.debug(
174
- "Cleared vector_store_id from thread %s",
175
- thread.thread_id,
176
- )
177
-
178
186
  await session.commit()
187
+ logger.debug(
188
+ "Cleared vector_store_id from %d threads for store %s",
189
+ updated_count,
190
+ vector_store_id,
191
+ )
179
192
 
180
193
  return updated_count
181
194
 
@@ -426,19 +426,13 @@ class FileUploadService:
426
426
  ) from last_error
427
427
 
428
428
  async def _delete_files_from_vector_stores(
429
- self, db_files: list[AssistantFileUpload]
429
+ self, vector_store_files: dict[str, list[str]]
430
430
  ) -> None:
431
- """Delete files FROM their vector stores (Level 1)."""
432
- # Build map of vector_store_id -> file_ids
433
- vector_store_files: dict[str, list[str]] = {}
434
- for db_file in db_files:
435
- if db_file.vector_store_id:
436
- if db_file.vector_store_id not in vector_store_files:
437
- vector_store_files[db_file.vector_store_id] = []
438
- vector_store_files[db_file.vector_store_id].append(
439
- db_file.openai_file_id
440
- )
431
+ """Delete files FROM their vector stores (Level 1).
441
432
 
433
+ Args:
434
+ vector_store_files: Map of vector_store_id -> list of file_ids.
435
+ """
442
436
  # Delete from each vector store
443
437
  for vs_id, vs_file_ids in vector_store_files.items():
444
438
  for file_id in vs_file_ids:
@@ -471,28 +465,36 @@ class FileUploadService:
471
465
 
472
466
  async def _delete_file_db_records(
473
467
  self,
474
- db_files: list[AssistantFileUpload],
475
- deletion_results: dict[str, bool],
468
+ openai_file_ids: list[str],
476
469
  ) -> None:
477
- """Delete database records for successfully deleted files (Level 3)."""
478
- deleted_file_ids = [fid for fid, success in deletion_results.items() if success]
479
- if not deleted_file_ids:
470
+ """Delete database records for files by their OpenAI file IDs (Level 3).
471
+
472
+ Args:
473
+ openai_file_ids: List of OpenAI file IDs to delete from database.
474
+ """
475
+ if not openai_file_ids:
480
476
  return
481
477
 
482
478
  async with get_asyncdb_session() as session:
479
+ result = await session.execute(
480
+ select(AssistantFileUpload).where(
481
+ AssistantFileUpload.openai_file_id.in_(openai_file_ids)
482
+ )
483
+ )
484
+ db_files = result.scalars().all()
485
+
483
486
  for db_file in db_files:
484
- if db_file.openai_file_id in deleted_file_ids:
485
- try:
486
- await session.delete(db_file)
487
- logger.debug(
488
- "Deleted DB record for file: %s", db_file.openai_file_id
489
- )
490
- except Exception as e:
491
- logger.warning(
492
- "Failed to delete DB record for file %s: %s",
493
- db_file.openai_file_id,
494
- e,
495
- )
487
+ try:
488
+ await session.delete(db_file)
489
+ logger.debug(
490
+ "Deleted DB record for file: %s", db_file.openai_file_id
491
+ )
492
+ except Exception as e:
493
+ logger.warning(
494
+ "Failed to delete DB record for file %s: %s",
495
+ db_file.openai_file_id,
496
+ e,
497
+ )
496
498
  await session.commit()
497
499
 
498
500
  async def upload_file(
@@ -524,7 +526,8 @@ class FileUploadService:
524
526
  file_size = path.stat().st_size
525
527
  if file_size > self._max_file_size_bytes:
526
528
  raise FileUploadError(
527
- f"Datei überschreitet die maximale Größe von {self.config.max_file_size_mb}MB"
529
+ "Datei überschreitet die maximale Größe von "
530
+ f"{self.config.max_file_size_mb}MB"
528
531
  )
529
532
 
530
533
  # Validate file count for thread
@@ -769,84 +772,117 @@ class FileUploadService:
769
772
  if not file_ids:
770
773
  return {}
771
774
 
772
- # Get file records from database to know which vector stores they belong to
775
+ # Get file records and extract needed data within session
776
+ vector_store_files: dict[str, list[str]] = {}
773
777
  async with get_asyncdb_session() as session:
774
778
  file_records = await session.execute(
775
779
  select(AssistantFileUpload).where(
776
780
  AssistantFileUpload.openai_file_id.in_(file_ids)
777
781
  )
778
782
  )
779
- db_files = file_records.scalars().all()
783
+ for db_file in file_records.scalars().all():
784
+ if db_file.vector_store_id:
785
+ if db_file.vector_store_id not in vector_store_files:
786
+ vector_store_files[db_file.vector_store_id] = []
787
+ vector_store_files[db_file.vector_store_id].append(
788
+ db_file.openai_file_id
789
+ )
780
790
 
781
791
  # LEVEL 1: Delete files FROM their vector stores
782
- await self._delete_files_from_vector_stores(db_files)
792
+ await self._delete_files_from_vector_stores(vector_store_files)
783
793
 
784
794
  # LEVEL 2: Delete files from OpenAI
785
795
  results = await self._delete_files_from_openai(file_ids)
786
796
 
787
797
  # LEVEL 3: Delete database records (only for successfully deleted files)
788
- await self._delete_file_db_records(db_files, results)
798
+ deleted_file_ids = [fid for fid, success in results.items() if success]
799
+ await self._delete_file_db_records(deleted_file_ids)
789
800
 
790
801
  return results
791
802
 
792
- async def delete_vector_store(self, vector_store_id: str) -> bool:
803
+ async def delete_vector_store(self, vector_store_id: str) -> dict[str, Any]:
793
804
  """Delete a vector store with proper ordering.
794
805
 
795
806
  Order:
796
- 1. Delete all files in the vector store (via delete_files - 3-level deletion)
797
- 2. Delete the vector store container itself
807
+ 1. Get file IDs from database (reliable even for expired stores)
808
+ 2. Try to get file IDs from OpenAI vector store (may fail if expired)
809
+ 3. Delete all files (3-level deletion)
810
+ 4. Delete the vector store container itself
811
+ 5. Clean up database records
798
812
 
799
813
  Args:
800
814
  vector_store_id: The vector store ID to delete.
801
815
 
802
816
  Returns:
803
- True if vector store was successfully deleted, False otherwise.
817
+ Dict with 'deleted' (bool), 'files_found' (int), 'files_deleted' (int).
804
818
  """
819
+ result = {"deleted": False, "files_found": 0, "files_deleted": 0}
820
+
805
821
  if not vector_store_id:
806
- return False
822
+ return result
807
823
 
808
824
  logger.info("Deleting vector store: %s", vector_store_id)
809
825
 
810
- # Step 1: List and delete all files in the vector store
826
+ # Step 1: Get file IDs from database (reliable even for expired stores)
827
+ db_file_ids: set[str] = set()
828
+ async with get_asyncdb_session() as session:
829
+ db_files = await file_upload_repo.find_by_vector_store(
830
+ session, vector_store_id
831
+ )
832
+ db_file_ids = {f.openai_file_id for f in db_files}
833
+
834
+ # Step 2: Try to get file IDs from OpenAI (may fail for expired stores)
835
+ openai_file_ids: set[str] = set()
811
836
  try:
812
837
  vs_files = await self.client.vector_stores.files.list(
813
838
  vector_store_id=vector_store_id
814
839
  )
815
- file_ids = [vs_file.id for vs_file in vs_files.data]
816
-
817
- if file_ids:
818
- logger.info(
819
- "Deleting %d files from vector store %s",
820
- len(file_ids),
821
- vector_store_id,
822
- )
823
- deletion_results = await self.delete_files(file_ids)
824
- successful = sum(1 for success in deletion_results.values() if success)
825
- logger.info(
826
- "Successfully deleted %d/%d files from vector store %s",
827
- successful,
828
- len(file_ids),
829
- vector_store_id,
830
- )
840
+ openai_file_ids = {vs_file.id for vs_file in vs_files.data}
831
841
  except Exception as e:
832
842
  logger.warning(
833
- "Failed to delete files from vector store %s: %s",
843
+ "Could not list files from vector store %s (may be expired): %s",
834
844
  vector_store_id,
835
845
  e,
836
846
  )
837
847
 
838
- # Step 2: Delete the vector store container itself
848
+ # Merge file IDs from both sources
849
+ all_file_ids = list(db_file_ids | openai_file_ids)
850
+ result["files_found"] = len(all_file_ids)
851
+
852
+ # Step 3: Delete all files (3-level deletion)
853
+ if all_file_ids:
854
+ logger.info(
855
+ "Deleting %d files from vector store %s (db: %d, openai: %d)",
856
+ len(all_file_ids),
857
+ vector_store_id,
858
+ len(db_file_ids),
859
+ len(openai_file_ids),
860
+ )
861
+ deletion_results = await self.delete_files(all_file_ids)
862
+ successful = sum(1 for success in deletion_results.values() if success)
863
+ result["files_deleted"] = successful
864
+ logger.info(
865
+ "Successfully deleted %d/%d files from vector store %s",
866
+ successful,
867
+ len(all_file_ids),
868
+ vector_store_id,
869
+ )
870
+
871
+ # Step 4: Delete the vector store container itself
839
872
  try:
840
873
  await self.client.vector_stores.delete(vector_store_id=vector_store_id)
841
874
  logger.info("Deleted vector store: %s", vector_store_id)
842
- return True
875
+ result["deleted"] = True
876
+ return result
843
877
  except Exception as e:
844
878
  logger.warning(
845
- "Failed to delete vector store %s (will auto-expire): %s",
879
+ "Failed to delete vector store %s (may already be expired): %s",
846
880
  vector_store_id,
847
881
  e,
848
882
  )
849
- return False
883
+ # Still return True if files were cleaned up - store may auto-expire
884
+ result["deleted"] = len(all_file_ids) > 0
885
+ return result
850
886
 
851
887
  async def cleanup_deleted_thread(
852
888
  self,
@@ -887,10 +923,10 @@ class FileUploadService:
887
923
  return result
888
924
 
889
925
  # Delete vector store (which handles all file deletion internally)
890
- vs_deleted = await self.delete_vector_store(vector_store_id)
891
- result["vector_store_deleted"] = vs_deleted
926
+ vs_result = await self.delete_vector_store(vector_store_id)
927
+ result["vector_store_deleted"] = vs_result["deleted"]
892
928
 
893
- if not vs_deleted:
929
+ if not vs_result["deleted"]:
894
930
  result["errors"].append(f"Failed to delete vector store {vector_store_id}")
895
931
 
896
932
  logger.info(
@@ -132,7 +132,7 @@ def get_file_validation_service() -> FileValidationService:
132
132
  Returns:
133
133
  The FileValidationService instance
134
134
  """
135
- global _file_validation_service
135
+ global _file_validation_service # noqa: PLW0603
136
136
  if _file_validation_service is None:
137
137
  _file_validation_service = FileValidationService()
138
138
  return _file_validation_service
@@ -83,7 +83,7 @@ def get_system_prompt_builder() -> SystemPromptBuilder:
83
83
  Returns:
84
84
  The SystemPromptBuilder instance
85
85
  """
86
- global _system_prompt_builder
86
+ global _system_prompt_builder # noqa: PLW0603
87
87
  if _system_prompt_builder is None:
88
88
  _system_prompt_builder = SystemPromptBuilder()
89
89
  return _system_prompt_builder
@@ -174,17 +174,15 @@ def file_upload(show: bool = False) -> rx.Component:
174
174
  rx.upload_files(upload_id="composer_file_upload")
175
175
  ),
176
176
  ),
177
- content=f"Dateien hochladen (max. {ThreadState.max_files_per_thread}, {ThreadState.max_file_size_mb}MB pro Datei)",
177
+ content=(
178
+ f"Dateien hochladen (max. {ThreadState.max_files_per_thread}, "
179
+ f"{ThreadState.max_file_size_mb}MB pro Datei)"
180
+ ),
178
181
  ),
179
182
  rx.fragment(),
180
183
  )
181
184
 
182
185
 
183
- def add_attachment(show: bool = False) -> rx.Component:
184
- """Legacy attachment function - now wraps file_upload."""
185
- return file_upload(show=show)
186
-
187
-
188
186
  def choose_model(show: bool = False) -> rx.Component | None:
189
187
  if not show:
190
188
  return None
@@ -220,7 +218,7 @@ def web_search_toggle() -> rx.Component:
220
218
  color_scheme=rx.cond(ThreadState.web_search_enabled, "blue", "accent"),
221
219
  padding="8px",
222
220
  margin_right=rx.cond(
223
- ThreadState.selected_model_supports_attachments, "6px", "14px"
221
+ ThreadState.selected_model_supports_attachments, "0px", "8px"
224
222
  ),
225
223
  margin_left="-6px",
226
224
  on_click=ThreadState.toggle_web_search,
@@ -237,7 +235,6 @@ def tools(show: bool = False) -> rx.Component:
237
235
  return rx.cond(
238
236
  show,
239
237
  rx.hstack(
240
- web_search_toggle(),
241
238
  tools_popover(),
242
239
  spacing="1",
243
240
  align="center",
@@ -274,7 +271,6 @@ def composer(*children, on_submit: Callable, **kwargs) -> rx.Component:
274
271
 
275
272
  class ComposerComponent(rx.ComponentNamespace):
276
273
  __call__ = staticmethod(composer)
277
- add_attachment = staticmethod(add_attachment)
278
274
  choose_model = staticmethod(choose_model)
279
275
  clear = staticmethod(clear)
280
276
  file_upload = staticmethod(file_upload)
@@ -293,6 +293,8 @@ def cleanup_progress_modal() -> rx.Component:
293
293
  cleanup_stat_row(
294
294
  "Gelöschte Stores:", stats.vector_stores_deleted
295
295
  ),
296
+ cleanup_stat_row("Gefundene Dateien:", stats.files_found),
297
+ cleanup_stat_row("Gelöschte Dateien:", stats.files_deleted),
296
298
  cleanup_stat_row(
297
299
  "Aktualisierte Threads:", stats.threads_updated
298
300
  ),
@@ -11,6 +11,7 @@ from reflex.vars.base import RETURN, CustomVarOperationReturn
11
11
  import appkit_mantine as mn
12
12
  from appkit_assistant.backend.database.models import MCPAuthType, MCPServer
13
13
  from appkit_assistant.backend.services.mcp_auth_service import MCPAuthService
14
+ from appkit_assistant.roles import ASSISTANT_USER_ROLE
14
15
  from appkit_assistant.state.mcp_server_state import MCPServerState
15
16
  from appkit_ui.components.dialogs import (
16
17
  delete_dialog,
@@ -30,6 +31,8 @@ class ValidationState(rx.State):
30
31
  name: str = ""
31
32
  desciption: str = ""
32
33
  prompt: str = ""
34
+ required_role: str = ASSISTANT_USER_ROLE.name
35
+ active: bool = False
33
36
 
34
37
  # Authentication type selection
35
38
  auth_type: str = AUTH_TYPE_API_KEY
@@ -60,6 +63,7 @@ class ValidationState(rx.State):
60
63
  self.name = ""
61
64
  self.desciption = ""
62
65
  self.prompt = ""
66
+ self.required_role = ASSISTANT_USER_ROLE.name
63
67
  self.auth_type = AUTH_TYPE_API_KEY
64
68
  self.oauth_client_id = ""
65
69
  self.oauth_client_secret = ""
@@ -72,6 +76,9 @@ class ValidationState(rx.State):
72
76
  self.name = server.name
73
77
  self.desciption = server.description
74
78
  self.prompt = server.prompt or ""
79
+ self.active = server.active
80
+ # Use sentinel value if no role is set
81
+ self.required_role = server.required_role or ASSISTANT_USER_ROLE.name
75
82
  # Determine auth type from server
76
83
  if server.oauth_client_id:
77
84
  self.auth_type = AUTH_TYPE_OAUTH
@@ -232,6 +239,10 @@ class ValidationState(rx.State):
232
239
  """Set the OAuth scopes."""
233
240
  self.oauth_scopes = value
234
241
 
242
+ def set_required_role(self, role: str) -> None:
243
+ """Set the required role for accessing this MCP server."""
244
+ self.required_role = role
245
+
235
246
  async def check_discovery(self) -> AsyncGenerator[Any, Any]:
236
247
  """Check for OAuth configuration at the given URL."""
237
248
  if not self.url or self.url_error:
@@ -438,6 +449,40 @@ def _oauth_auth_fields(server: MCPServer | None = None) -> rx.Component:
438
449
  )
439
450
 
440
451
 
452
+ def _role_select() -> rx.Component:
453
+ """Role selection dropdown for MCP server access control."""
454
+ return rx.box(
455
+ rx.text("Erforderliche Rolle", size="2", weight="medium"),
456
+ rx.text(
457
+ "Nur Benutzer mit dieser Rolle können den MCP Server verwenden.",
458
+ size="1",
459
+ color="gray",
460
+ margin_bottom="3px",
461
+ ),
462
+ rx.select.root(
463
+ rx.select.trigger(
464
+ placeholder="Rolle auswählen",
465
+ width="100%",
466
+ ),
467
+ rx.select.content(
468
+ rx.foreach(
469
+ MCPServerState.available_roles,
470
+ lambda role: rx.select.item(
471
+ role["label"],
472
+ value=role["value"],
473
+ ),
474
+ ),
475
+ ),
476
+ value=ValidationState.required_role,
477
+ on_change=ValidationState.set_required_role,
478
+ name="required_role",
479
+ width="100%",
480
+ ),
481
+ width="100%",
482
+ margin_bottom="12px",
483
+ )
484
+
485
+
441
486
  def mcp_server_form_fields(server: MCPServer | None = None) -> rx.Component:
442
487
  """Reusable form fields for MCP server add/update dialogs."""
443
488
  is_edit_mode = server is not None
@@ -528,6 +573,7 @@ def mcp_server_form_fields(server: MCPServer | None = None) -> rx.Component:
528
573
  spacing="0",
529
574
  width="100%",
530
575
  ),
576
+ _role_select(),
531
577
  # Authentication type selector and conditional fields
532
578
  _auth_type_selector(),
533
579
  _api_key_auth_fields(server),
@@ -36,6 +36,19 @@ def mcp_server_table_row(server: MCPServer) -> TableRow:
36
36
  "width": "100%",
37
37
  },
38
38
  ),
39
+ rx.table.cell(
40
+ rx.cond(
41
+ server.required_role,
42
+ rx.text(
43
+ MCPServerState.role_labels.get(
44
+ server.required_role, server.required_role
45
+ ),
46
+ size="2",
47
+ ),
48
+ rx.text("-", size="2", color="gray"),
49
+ ),
50
+ white_space="nowrap",
51
+ ),
39
52
  rx.table.cell(
40
53
  rx.switch(
41
54
  checked=server.active,
@@ -60,7 +73,16 @@ def mcp_server_table_row(server: MCPServer) -> TableRow:
60
73
  )
61
74
 
62
75
 
63
- def mcp_servers_table() -> rx.Fragment:
76
+ def mcp_servers_table(
77
+ role_labels: dict[str, str] | None = None,
78
+ available_roles: list[dict[str, str]] | None = None,
79
+ ) -> rx.Fragment:
80
+ # Set default empty values if not provided
81
+ if role_labels is None:
82
+ role_labels = {}
83
+ if available_roles is None:
84
+ available_roles = []
85
+
64
86
  return rx.fragment(
65
87
  rx.flex(
66
88
  add_mcp_server_button(),
@@ -71,8 +93,9 @@ def mcp_servers_table() -> rx.Fragment:
71
93
  rx.table.row(
72
94
  rx.table.column_header_cell("Name", width="20%"),
73
95
  rx.table.column_header_cell(
74
- "Beschreibung", width="calc(80% - 230px)"
96
+ "Beschreibung", width="calc(80% - 350px)"
75
97
  ),
98
+ rx.table.column_header_cell("Rolle", width="120px"),
76
99
  rx.table.column_header_cell("Aktiv", width="90px"),
77
100
  rx.table.column_header_cell("", width="140px"),
78
101
  ),
@@ -81,6 +104,9 @@ def mcp_servers_table() -> rx.Fragment:
81
104
  size="3",
82
105
  width="100%",
83
106
  table_layout="fixed",
84
- on_mount=MCPServerState.load_servers_with_toast,
107
+ on_mount=[
108
+ MCPServerState.set_available_roles(available_roles, role_labels),
109
+ MCPServerState.load_servers_with_toast,
110
+ ],
85
111
  ),
86
112
  )
@@ -8,7 +8,9 @@ from appkit_assistant.backend.schemas import Message, MessageType
8
8
  from appkit_assistant.components import composer
9
9
  from appkit_assistant.components.message import AuthCardComponent, MessageComponent
10
10
  from appkit_assistant.components.threadlist import ThreadList
11
+ from appkit_assistant.roles import ASSISTANT_FILE_UPLOAD_ROLE, ASSISTANT_WEB_SEARCH_ROLE
11
12
  from appkit_assistant.state.thread_state import ThreadState
13
+ from appkit_user.authentication.components.components import requires_role
12
14
 
13
15
  logger = logging.getLogger(__name__)
14
16
 
@@ -120,7 +122,14 @@ class Assistant:
120
122
  composer.choose_model(show=with_model_chooser),
121
123
  ),
122
124
  rx.hstack(
123
- composer.file_upload(show=with_attachments),
125
+ requires_role(
126
+ composer.file_upload(show=with_attachments),
127
+ role=ASSISTANT_FILE_UPLOAD_ROLE.name,
128
+ ),
129
+ requires_role(
130
+ composer.web_search_toggle(),
131
+ role=ASSISTANT_WEB_SEARCH_ROLE.name,
132
+ ),
124
133
  composer.tools(
125
134
  show=with_tools and ThreadState.selected_model_supports_tools
126
135
  ),
@@ -0,0 +1,42 @@
1
+ from typing import Final
2
+
3
+ from appkit_commons.roles import Role
4
+
5
+ ASSISTANT_GROUP: Final[str] = "Assistent"
6
+
7
+ ASSISTANT_USER_ROLE: Final[Role] = Role(
8
+ name="assistant", label="Chat", group=ASSISTANT_GROUP
9
+ )
10
+ ASSISTANT_ADMIN_ROLE: Final[Role] = Role(
11
+ name="assistant-admin", label="Administration", group=ASSISTANT_GROUP
12
+ )
13
+
14
+ ASSISTANT_BASIC_MODELS_ROLE: Final[Role] = Role(
15
+ name="assistant-basic_models", label="Basis-Modelle", group=ASSISTANT_GROUP
16
+ )
17
+ ASSISTANT_ADVANCED_MODELS_ROLE: Final[Role] = Role(
18
+ name="assistant-advanced_models", label="Erweiterte Modelle", group=ASSISTANT_GROUP
19
+ )
20
+ ASSISTANT_PERPLEXITY_MODEL_ROLE = Role(
21
+ id=10001,
22
+ name="perplexity_models",
23
+ label="Perplexity Modelle",
24
+ description="Berechtigung für Perplexity KI-Modelle",
25
+ group=ASSISTANT_GROUP,
26
+ )
27
+ ASSISTANT_WEB_SEARCH_ROLE: Final[Role] = Role(
28
+ name="assistant-web_search", label="Websuche", group=ASSISTANT_GROUP
29
+ )
30
+ ASSISTANT_FILE_UPLOAD_ROLE: Final[Role] = Role(
31
+ name="file_upload", label="Datei-Upload", group=ASSISTANT_GROUP
32
+ )
33
+
34
+ ASSISTANT_ROLES: Final[list[Role]] = [
35
+ ASSISTANT_USER_ROLE,
36
+ ASSISTANT_BASIC_MODELS_ROLE,
37
+ ASSISTANT_ADVANCED_MODELS_ROLE,
38
+ ASSISTANT_PERPLEXITY_MODEL_ROLE,
39
+ ASSISTANT_WEB_SEARCH_ROLE,
40
+ ASSISTANT_FILE_UPLOAD_ROLE,
41
+ ASSISTANT_ADMIN_ROLE,
42
+ ]
@@ -8,7 +8,7 @@ from typing import Any, Final
8
8
  import reflex as rx
9
9
  from pydantic import BaseModel
10
10
 
11
- from appkit_assistant.backend.database.repositories import file_upload_repo
11
+ from appkit_assistant.backend.database.repositories import file_upload_repo, thread_repo
12
12
  from appkit_assistant.backend.services.file_cleanup_service import run_cleanup
13
13
  from appkit_assistant.backend.services.openai_client_service import (
14
14
  get_openai_client_service,
@@ -118,6 +118,8 @@ class CleanupStats(BaseModel):
118
118
  vector_stores_checked: int = 0
119
119
  vector_stores_expired: int = 0
120
120
  vector_stores_deleted: int = 0
121
+ files_found: int = 0
122
+ files_deleted: int = 0
121
123
  threads_updated: int = 0
122
124
  current_vector_store: str | None = None
123
125
  total_vector_stores: int = 0
@@ -257,10 +259,17 @@ class FileManagerState(rx.State):
257
259
 
258
260
  # Delete records from database
259
261
  await file_upload_repo.delete_by_vector_store(session, store_id)
262
+
263
+ # Clear vector_store_id from any threads referencing this store
264
+ threads_updated = await thread_repo.clear_vector_store_id(
265
+ session, store_id
266
+ )
267
+
260
268
  await session.commit()
261
269
  logger.info(
262
- "Deleted %d files for vector store %s",
270
+ "Deleted %d files and cleared %d threads for vector store %s",
263
271
  len(files),
272
+ threads_updated,
264
273
  store_id,
265
274
  )
266
275
 
@@ -300,20 +309,31 @@ class FileManagerState(rx.State):
300
309
  self.loading = True
301
310
  yield
302
311
  try:
303
- # First validate the vector store exists in OpenAI
312
+ # First validate the vector store exists and is not expired in OpenAI
304
313
  openai_service = get_openai_client_service()
305
314
  if openai_service.is_available:
306
315
  client = openai_service.create_client()
307
316
  if client:
308
317
  try:
309
- await client.vector_stores.retrieve(store_id)
318
+ vector_store = await client.vector_stores.retrieve(store_id)
319
+ # Check if the vector store has expired
320
+ if vector_store.status == "expired":
321
+ logger.info(
322
+ "Vector store %s has expired status, cleaning up",
323
+ store_id,
324
+ )
325
+ async for event in self._cleanup_expired_vector_store(
326
+ store_id
327
+ ):
328
+ yield event
329
+ return
310
330
  logger.debug("Vector store %s exists in OpenAI", store_id)
311
331
  except Exception as e:
312
332
  # Vector store not found - clean up
313
333
  error_msg = str(e).lower()
314
334
  if "not found" in error_msg or "404" in error_msg:
315
335
  logger.info(
316
- "Vector store %s expired/deleted, cleaning up",
336
+ "Vector store %s not found, cleaning up",
317
337
  store_id,
318
338
  )
319
339
  async for event in self._cleanup_expired_vector_store(
@@ -338,18 +358,19 @@ class FileManagerState(rx.State):
338
358
  async def _cleanup_expired_vector_store(
339
359
  self, store_id: str
340
360
  ) -> AsyncGenerator[Any, Any]:
341
- """Clean up an expired vector store: delete DB records and OpenAI files."""
361
+ """Clean up an expired vector store: delete OpenAI files, store, and DB."""
342
362
  try:
343
363
  # Get files from DB to know which OpenAI files to delete
344
364
  async with get_asyncdb_session() as session:
345
365
  files = await file_upload_repo.find_by_vector_store(session, store_id)
346
366
  openai_file_ids = [f.openai_file_id for f in files]
347
367
 
348
- # Delete files from OpenAI
368
+ # Delete files and vector store from OpenAI
349
369
  openai_service = get_openai_client_service()
350
370
  if openai_service.is_available:
351
371
  client = openai_service.create_client()
352
372
  if client:
373
+ # Delete files from OpenAI storage
353
374
  for file_id in openai_file_ids:
354
375
  try:
355
376
  await client.files.delete(file_id=file_id)
@@ -363,12 +384,34 @@ class FileManagerState(rx.State):
363
384
  e,
364
385
  )
365
386
 
387
+ # Delete the vector store from OpenAI
388
+ try:
389
+ await client.vector_stores.delete(vector_store_id=store_id)
390
+ logger.info(
391
+ "Deleted expired vector store from OpenAI: %s",
392
+ store_id,
393
+ )
394
+ except Exception as e:
395
+ logger.warning(
396
+ "Failed to delete vector store %s from OpenAI "
397
+ "(may already be deleted): %s",
398
+ store_id,
399
+ e,
400
+ )
401
+
366
402
  # Delete records from database
367
403
  await file_upload_repo.delete_by_vector_store(session, store_id)
404
+
405
+ # Clear vector_store_id from any threads referencing this store
406
+ threads_updated = await thread_repo.clear_vector_store_id(
407
+ session, store_id
408
+ )
409
+
368
410
  await session.commit()
369
411
  logger.info(
370
- "Cleaned up %d files for expired vector store %s",
412
+ "Cleaned up %d files and %d threads for expired vector store %s",
371
413
  len(files),
414
+ threads_updated,
372
415
  store_id,
373
416
  )
374
417
 
@@ -667,6 +710,8 @@ class FileManagerState(rx.State):
667
710
  vector_stores_checked=stats.get("vector_stores_checked", 0),
668
711
  vector_stores_expired=stats.get("vector_stores_expired", 0),
669
712
  vector_stores_deleted=stats.get("vector_stores_deleted", 0),
713
+ files_found=stats.get("files_found", 0),
714
+ files_deleted=stats.get("files_deleted", 0),
670
715
  threads_updated=stats.get("threads_updated", 0),
671
716
  current_vector_store=stats.get("current_vector_store"),
672
717
  total_vector_stores=stats.get("total_vector_stores", 0),
@@ -23,6 +23,17 @@ class MCPServerState(rx.State):
23
23
  servers: list[MCPServer] = []
24
24
  current_server: MCPServer | None = None
25
25
  loading: bool = False
26
+ available_roles: list[dict[str, str]] = []
27
+ role_labels: dict[str, str] = {}
28
+
29
+ def set_available_roles(
30
+ self,
31
+ available_roles: list[dict[str, str]],
32
+ role_labels: dict[str, str],
33
+ ) -> None:
34
+ """Set the available roles for MCP server access control."""
35
+ self.available_roles = available_roles
36
+ self.role_labels = role_labels
26
37
 
27
38
  async def load_servers(self) -> None:
28
39
  """Load all MCP servers from the database.
@@ -82,6 +93,7 @@ class MCPServerState(rx.State):
82
93
  headers=headers,
83
94
  description=form_data.get("description") or None,
84
95
  prompt=form_data.get("prompt") or None,
96
+ required_role=form_data.get("required_role") or None,
85
97
  auth_type=auth_type,
86
98
  oauth_client_id=(
87
99
  form_data.get("oauth_client_id")
@@ -156,6 +168,9 @@ class MCPServerState(rx.State):
156
168
  existing_server.headers = headers
157
169
  existing_server.description = form_data.get("description") or None
158
170
  existing_server.prompt = form_data.get("prompt") or None
171
+ existing_server.required_role = (
172
+ form_data.get("required_role") or None
173
+ )
159
174
  existing_server.auth_type = auth_type
160
175
  existing_server.oauth_client_id = (
161
176
  form_data.get("oauth_client_id")
@@ -69,7 +69,9 @@ class ThreadListState(rx.State):
69
69
  async def initialize(self) -> AsyncGenerator[Any, Any]:
70
70
  """Initialize thread list - load summaries from database."""
71
71
  async with self:
72
- if self._initialized or self.loading:
72
+ if self._initialized:
73
+ self.loading = False
74
+ yield
73
75
  return
74
76
  self.loading = True
75
77
  yield
@@ -331,6 +331,7 @@ class ThreadState(rx.State):
331
331
  ThreadListState
332
332
  )
333
333
  threadlist_state.loading_thread_id = ""
334
+ yield
334
335
  return
335
336
 
336
337
  try:
@@ -343,6 +344,7 @@ class ThreadState(rx.State):
343
344
  ThreadListState
344
345
  )
345
346
  threadlist_state.loading_thread_id = ""
347
+ yield
346
348
  return
347
349
 
348
350
  # Mark all messages as done (loaded from DB)
@@ -371,6 +373,7 @@ class ThreadState(rx.State):
371
373
  threadlist_state.loading_thread_id = ""
372
374
 
373
375
  logger.debug("Loaded thread: %s", thread_id)
376
+ yield
374
377
 
375
378
  except Exception as e:
376
379
  logger.error("Error loading thread %s: %s", thread_id, e)
@@ -379,6 +382,7 @@ class ThreadState(rx.State):
379
382
  ThreadListState
380
383
  )
381
384
  threadlist_state.loading_thread_id = ""
385
+ yield
382
386
 
383
387
  # -------------------------------------------------------------------------
384
388
  # Prompt and model management
@@ -431,11 +435,23 @@ class ThreadState(rx.State):
431
435
 
432
436
  @rx.event
433
437
  async def load_mcp_servers(self) -> None:
434
- """Load available active MCP servers from the database."""
438
+ """Load available active MCP servers filtered by user roles."""
439
+ # Get the user session to check roles
440
+ user_session = await self.get_state(UserSession)
441
+ user = await user_session.authenticated_user
442
+ user_roles: list[str] = user.roles if user else []
443
+
435
444
  async with get_asyncdb_session() as session:
436
445
  servers = await mcp_server_repo.find_all_active_ordered_by_name(session)
437
- # Create detached copies
438
- self.available_mcp_servers = [MCPServer(**s.model_dump()) for s in servers]
446
+ # Filter servers by user roles:
447
+ # - Include if required_role is empty/None (no restriction)
448
+ # - Include if user has the required_role
449
+ filtered_servers = [
450
+ MCPServer(**s.model_dump())
451
+ for s in servers
452
+ if not s.required_role or s.required_role in user_roles
453
+ ]
454
+ self.available_mcp_servers = filtered_servers
439
455
 
440
456
  @rx.event
441
457
  def toogle_tools_modal(self, show: bool) -> None:
@@ -505,7 +521,10 @@ class ThreadState(rx.State):
505
521
  # Validate file count (using state variables from config)
506
522
  if len(files) > self.max_files_per_thread:
507
523
  yield rx.toast.error(
508
- f"Bitte laden Sie maximal {self.max_files_per_thread} Dateien gleichzeitig hoch.",
524
+ (
525
+ f"Bitte laden Sie maximal {self.max_files_per_thread} "
526
+ "Dateien gleichzeitig hoch."
527
+ ),
509
528
  position="top-right",
510
529
  close_button=True,
511
530
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: appkit-assistant
3
- Version: 1.0.3
3
+ Version: 1.0.5
4
4
  Summary: Add your description here
5
5
  Project-URL: Homepage, https://github.com/jenreh/appkit
6
6
  Project-URL: Documentation, https://github.com/jenreh/appkit/tree/main/docs
@@ -20,6 +20,7 @@ Requires-Dist: anthropic>=0.77.0
20
20
  Requires-Dist: appkit-commons
21
21
  Requires-Dist: appkit-mantine
22
22
  Requires-Dist: appkit-ui
23
+ Requires-Dist: appkit-user
23
24
  Requires-Dist: apscheduler>=3.11.2
24
25
  Requires-Dist: google-genai>=1.60.0
25
26
  Requires-Dist: mcp>=1.26.0
@@ -1,58 +1,59 @@
1
1
  appkit_assistant/configuration.py,sha256=9sz75L2ZJA7LXIkZG-uSSI_OTvIRuW9WQEusaMdYBLw,809
2
2
  appkit_assistant/pages.py,sha256=gDvBweUO2WjrhP1RE5AAkjL1_S-givWr3CkkGZKws_E,471
3
+ appkit_assistant/roles.py,sha256=iumhMu4_3uW0cgBmRallHsUVWNEDxEMx1dXqcXAuECo,1307
3
4
  appkit_assistant/backend/model_manager.py,sha256=ebXAjWsWMJBZ-ecbfbiEMNhJRnMl-N0VARCpmXhWYwA,4415
4
5
  appkit_assistant/backend/schemas.py,sha256=jD78oIEs5a3R2dR-q9YBWF_udfnkuPWHuOVC6JOaIbc,3409
5
6
  appkit_assistant/backend/system_prompt_cache.py,sha256=uITevz2x304FBlR9bvDNchl0GkCouT-lxWe-SmEbhn8,5514
6
- appkit_assistant/backend/database/models.py,sha256=gFMJffF47iLOIYfuUR01mZeffjpXAwOZsarYaUe9pko,6460
7
- appkit_assistant/backend/database/repositories.py,sha256=CRUJDEMSPeqIHj-pGerraNvfcE6W9TNW8K1P3Govkkc,8940
7
+ appkit_assistant/backend/database/models.py,sha256=6Uh1Euebf2u39s7kf-gAsyaryubMRG6Qv61ZGA0Uh18,6527
8
+ appkit_assistant/backend/database/repositories.py,sha256=14ccdjhB4SEz-UrvD5jerhGa6SLDPgpr2GGfWU5p_jw,10072
8
9
  appkit_assistant/backend/models/__init__.py,sha256=PbY4qgpDxOmw-80YxjQngKZ52k9J_1KT3UfYxW1_Ut0,696
9
- appkit_assistant/backend/models/anthropic.py,sha256=-GsS_xvlrniPlEXMDZgSz8nvRO8KdTkXiyDD38cb6nk,640
10
- appkit_assistant/backend/models/google.py,sha256=Tfeg-JIbtgn4p5SSlAy38-S8xfOhyLyHMtCKC8ro1r8,591
11
- appkit_assistant/backend/models/openai.py,sha256=VZ9bxOZFM4VVALdx1yfV9kIwD9N1fAAkVIP1cJZ18PY,956
12
- appkit_assistant/backend/models/perplexity.py,sha256=16-pYghTIJdXAghHozVfpv_hHn2T8mVIC-2E3bhC0bQ,1237
10
+ appkit_assistant/backend/models/anthropic.py,sha256=wetJXW-GIgBTvgQXOV7N1MBNfr_ZS7t3kI9Acmk4lYI,924
11
+ appkit_assistant/backend/models/google.py,sha256=1KrQkDBbWT3sOjnDagsXGHScWfCFqdKYbG97w5v-mUQ,872
12
+ appkit_assistant/backend/models/openai.py,sha256=R0mxVNNnTRwJgJKmXm8DMSLkbRJHgBlTh_tee1HZHos,1409
13
+ appkit_assistant/backend/models/perplexity.py,sha256=O2wVHe7lihRrNKRZ_Dy4Qd4AIEtS8iw9NuwSAP_xLAA,1566
13
14
  appkit_assistant/backend/processors/__init__.py,sha256=DqN8cgPNUNCkCzwqeuYIlwj_S9Nh-t4K4sm3KkyJx0M,1053
14
15
  appkit_assistant/backend/processors/claude_responses_processor.py,sha256=PfA9KRVcMxOpDd8AXYHWU_YZYSByymB1-r-SAYTPEHY,27430
15
16
  appkit_assistant/backend/processors/gemini_responses_processor.py,sha256=wuhQNomj8_qnQ7_ZRMk_nAsNCkAx21mkDIziyzKuCuE,25210
16
- appkit_assistant/backend/processors/lorem_ipsum_processor.py,sha256=iZLVCuYPb_lBG-p3Ug2QvuL28kEhtwhWL2Yy_WiYbrU,5201
17
+ appkit_assistant/backend/processors/lorem_ipsum_processor.py,sha256=r87iT_O8tQ2Gb8od4t61twpHU1LpLPFd5qGLg305tXs,5366
17
18
  appkit_assistant/backend/processors/mcp_mixin.py,sha256=Uj60p21GXAeNSmcfwMhUOuaWwGfr2ssAKSDjGwFMwls,9824
18
19
  appkit_assistant/backend/processors/openai_base.py,sha256=hLg1uIlrcfQjsewQhBKg8_1kjnk1-Pc9Y1KVYUe8_BA,2348
19
20
  appkit_assistant/backend/processors/openai_chat_completion_processor.py,sha256=WaYamOWazqjBxpEDhlpSysklAH0WWN86EhTGEOyPQyc,5084
20
- appkit_assistant/backend/processors/openai_responses_processor.py,sha256=jVOnRxEzoPF9c3VJqecH_8-xEb7g9MFqC_qiQ4vcbY0,34078
21
+ appkit_assistant/backend/processors/openai_responses_processor.py,sha256=Dfx9azG6CUGMS5hz8CrWAPpRVJDBVkamV_VuEBFVmG0,34088
21
22
  appkit_assistant/backend/processors/perplexity_processor.py,sha256=U6YahaBdYtGOg6y9cOIGCjEer9aoNlDX7hHAXXQUupU,7092
22
23
  appkit_assistant/backend/processors/processor_base.py,sha256=82WyLsOMjnnsmXao7lWF9HTtH6QcC7UD5gBkr4NSFHA,2352
23
24
  appkit_assistant/backend/processors/streaming_base.py,sha256=fvkkbS2AJO1JEIJ3xKvfHU2dElJryorIVMqtF-EV7e0,5926
24
- appkit_assistant/backend/services/auth_error_detector.py,sha256=eNdpxemtSEVKHkeD_bM52lEyv9G7etA5NNmhFCV4oZk,2692
25
+ appkit_assistant/backend/services/auth_error_detector.py,sha256=ktAYU51B2JozhTaztrJaO1Es5qNIMk5euvbVjF5erhw,2709
25
26
  appkit_assistant/backend/services/chunk_factory.py,sha256=CYeurM8v5VGloVV11lKmNksI4Ilu45XoPY-Qyp3Wl7E,8078
26
27
  appkit_assistant/backend/services/citation_handler.py,sha256=Qfzn5knLRlW7hi7qoSS_R9y1HyfClT3_opbJRtWapSg,9521
27
- appkit_assistant/backend/services/file_cleanup_service.py,sha256=sF7qRYZuSE9vXPlayBbPMg2yAEgtmAHi6zQ-kJYu4HI,11025
28
+ appkit_assistant/backend/services/file_cleanup_service.py,sha256=4Hj1YQ1CgX0AWBQY6QioPpQhZIh_5ZpkcF1baKMg_cw,11713
28
29
  appkit_assistant/backend/services/file_manager.py,sha256=54SYphu6FsxbEYuMx8ohQiSAeY2gGDV1q3S6RZuNku0,3153
29
- appkit_assistant/backend/services/file_upload_service.py,sha256=xQnp70MT2yxsFrq4rw9JSu-_4we2pQAlKIfT69wDnMU,32426
30
- appkit_assistant/backend/services/file_validation.py,sha256=ULsfSa19jO7RZigKGmxqEqssfbvn-BFIAaDUlqKu8gM,3893
30
+ appkit_assistant/backend/services/file_upload_service.py,sha256=TiOUWzhm5XcfOA7sWtuLSFQ4ilc_5nVze9fOuQGquP8,33862
31
+ appkit_assistant/backend/services/file_validation.py,sha256=K-TNpX8o3p0vN0zFSBagA6hinbMt_TpwH3tBM1-PxIk,3910
31
32
  appkit_assistant/backend/services/mcp_auth_service.py,sha256=4qMLmoQWDalQ9kRsylJD2maeTyhklbYOHh2XTQJdh7A,27826
32
33
  appkit_assistant/backend/services/mcp_token_service.py,sha256=sgGU6Zy5bIV4q2J9Yl1ReMWxP8UzAQMiV_4h2VQgj0s,1839
33
34
  appkit_assistant/backend/services/message_converter.py,sha256=23W9mdY6g-OOf-p-uk0246GSvSJqqY2rozmHEtRISsw,9195
34
35
  appkit_assistant/backend/services/openai_client_service.py,sha256=LxaAsXd18Tec5PRPJPHO5iVyAX84F9ovC-RT7EKJqJ4,3819
35
36
  appkit_assistant/backend/services/response_accumulator.py,sha256=qiBVsjg3qaKbMQocv0l4lgSmipG-uiHsLaR2SRPhf-I,16070
36
- appkit_assistant/backend/services/system_prompt_builder.py,sha256=_xIslLgHoxWZEYOqBGzk7iyu0-tQjifh-HXIi9Mh4ZY,2545
37
+ appkit_assistant/backend/services/system_prompt_builder.py,sha256=EruxjdkccnmaBf8xtAWF1-tL9DoCSkVgT9CkMr9xs7k,2562
37
38
  appkit_assistant/backend/services/thread_service.py,sha256=X7YC-SG18N-f-LhztVAuWJu0Z3z43ATJsLT-N9PAYbM,4730
38
39
  appkit_assistant/components/__init__.py,sha256=XRU-I5HHx9Zf4ROlZKkONTEIRkR8JwQzWELS8xfN1g0,1059
39
- appkit_assistant/components/composer.py,sha256=c3OF3bzeCv9bcOq0Y8wjEXL-0gEC8PLuDdDTXi-UfAc,8780
40
+ appkit_assistant/components/composer.py,sha256=l2y01Q3WMLcmMKNqh8kPON4S5u-SYtaNcVvrI_Fuchc,8594
40
41
  appkit_assistant/components/composer_key_handler.py,sha256=KyZYyhxzFR8DH_7F_DrvTFNT6v5kG6JihlGTmCv2wv0,1028
41
- appkit_assistant/components/file_manager.py,sha256=O_7PTZJIYlF0m-bh2ZpBlrgctdnfAIeLblK5s_OD_2w,24750
42
+ appkit_assistant/components/file_manager.py,sha256=LQ8SvvWqCREIDnTXPDnK_bMfuORmVPwi2V841SIFMWY,24919
42
43
  appkit_assistant/components/mcp_oauth.py,sha256=puLwxAhmF25BjnZMdJbKIfC6bFXK2D8LybOX0kD7Ri4,1737
43
- appkit_assistant/components/mcp_server_dialogs.py,sha256=UHsqZL1uED9dpFwggC4maJGrkSJrJqHAeABXK7DmAXY,23153
44
- appkit_assistant/components/mcp_server_table.py,sha256=iRuhiZkh-lldd8ik5SBpOSYlL-Yws5l_SKyAbhENvC0,2686
44
+ appkit_assistant/components/mcp_server_dialogs.py,sha256=SaN2vpobDxcs5L3vBhxh1Tnpxh1vE3XUC5sP6YppALc,24753
45
+ appkit_assistant/components/mcp_server_table.py,sha256=12inxvHN3Wz0ldYAf5m-saFRLYvAOdIy9L1AGu0hL3g,3553
45
46
  appkit_assistant/components/message.py,sha256=risxedRZsaOCaH0ldL14D2HaBA9y0_Jk5nFxSc99F38,23686
46
47
  appkit_assistant/components/system_prompt_editor.py,sha256=REl33zFmcpYRe9kxvFrBRYg40dV4L4FtVC_3ibLsmrU,2940
47
- appkit_assistant/components/thread.py,sha256=N9JcPk2wKycAr9LjZCun2ogy5wfZme6JrQ-Dw-dprys,8462
48
+ appkit_assistant/components/thread.py,sha256=1-i3Nmcqmji8giY5Wh1wdMnyyYYmVzlPzJIbZaGbU-s,8923
48
49
  appkit_assistant/components/threadlist.py,sha256=vw9nHcJ0ICeOvLRj8VewJo2cXVYqIiAMiJRZLSK_V90,4968
49
50
  appkit_assistant/components/tools_modal.py,sha256=T3YVR5srunQFLIgrWXEnxcFXbrdsX38WetJGAC9z9kA,4076
50
- appkit_assistant/state/file_manager_state.py,sha256=G0ZkgzFIqa6H2ctc5uID2aSxt-e78NWwdbyGvby8Q0E,25993
51
+ appkit_assistant/state/file_manager_state.py,sha256=R6KWx23bAiF5-Zo2VRrx7TOVovuQVR7dFQ0n9k7KKCE,28104
51
52
  appkit_assistant/state/mcp_oauth_state.py,sha256=vWiCWolRY-sSUJGPEGHS-rUwlpomGKfpejZck9EImas,7703
52
- appkit_assistant/state/mcp_server_state.py,sha256=TQOhnXEfuA5bWFh-5f5R5LfTZErXFXNZa4t0_vP1bGM,13174
53
+ appkit_assistant/state/mcp_server_state.py,sha256=AHCrEGec33rtfUuKA4ZxLlWFbLGqJcUJSHkhDH0OwTg,13765
53
54
  appkit_assistant/state/system_prompt_state.py,sha256=E2jbBIGfgifvJRZFmEmeooWv5xihUfPbhFe8MzZAS0E,7714
54
- appkit_assistant/state/thread_list_state.py,sha256=LQs7sjBTszORhr_Czm4mZxWVjb2vO7Z5UutMOX6ansM,14082
55
- appkit_assistant/state/thread_state.py,sha256=udIJWCORMuASIOZMnZidoyC6Te-qyDBkZfi7MOIHmKs,42937
56
- appkit_assistant-1.0.3.dist-info/METADATA,sha256=D6TBbLdXZeJ5mo9OnzkkeiuKr-VNSLI3gQAUd3VzBME,9574
57
- appkit_assistant-1.0.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
58
- appkit_assistant-1.0.3.dist-info/RECORD,,
55
+ appkit_assistant/state/thread_list_state.py,sha256=0K_1COSNk7WycY5mE_o15vjPttXq4J27d6zDn0pDiss,14125
56
+ appkit_assistant/state/thread_state.py,sha256=RLVKzRJs1neQHqkZMPI61TdLPY_LPsL4s-EF4jbWuVI,43596
57
+ appkit_assistant-1.0.5.dist-info/METADATA,sha256=hK91qcbK1sN1EPpB1WKvAyE8KkJlRAvfV2cE7ZUxAWo,9601
58
+ appkit_assistant-1.0.5.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
59
+ appkit_assistant-1.0.5.dist-info/RECORD,,