appkit-assistant 1.0.3__tar.gz → 1.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/PKG-INFO +2 -1
  2. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/pyproject.toml +2 -1
  3. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/database/models.py +1 -0
  4. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/database/repositories.py +34 -0
  5. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/models/anthropic.py +8 -0
  6. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/models/google.py +8 -0
  7. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/models/openai.py +12 -0
  8. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/models/perplexity.py +6 -0
  9. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/lorem_ipsum_processor.py +3 -0
  10. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/openai_responses_processor.py +2 -1
  11. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/auth_error_detector.py +1 -1
  12. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/file_cleanup_service.py +42 -29
  13. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/file_upload_service.py +99 -63
  14. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/file_validation.py +1 -1
  15. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/system_prompt_builder.py +1 -1
  16. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/composer.py +4 -8
  17. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/file_manager.py +2 -0
  18. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/mcp_server_dialogs.py +46 -0
  19. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/mcp_server_table.py +29 -3
  20. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/thread.py +10 -1
  21. appkit_assistant-1.0.4/src/appkit_assistant/roles.py +42 -0
  22. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/file_manager_state.py +53 -8
  23. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/mcp_server_state.py +15 -0
  24. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/thread_state.py +19 -4
  25. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/.gitignore +0 -0
  26. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/README.md +0 -0
  27. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/docs/assistant.png +0 -0
  28. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/model_manager.py +0 -0
  29. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/models/__init__.py +0 -0
  30. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/__init__.py +0 -0
  31. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/claude_responses_processor.py +0 -0
  32. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/gemini_responses_processor.py +0 -0
  33. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/mcp_mixin.py +0 -0
  34. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/openai_base.py +0 -0
  35. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/openai_chat_completion_processor.py +0 -0
  36. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/perplexity_processor.py +0 -0
  37. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/processor_base.py +0 -0
  38. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/processors/streaming_base.py +0 -0
  39. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/schemas.py +0 -0
  40. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/chunk_factory.py +0 -0
  41. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/citation_handler.py +0 -0
  42. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/file_manager.py +0 -0
  43. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/mcp_auth_service.py +0 -0
  44. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/mcp_token_service.py +0 -0
  45. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/message_converter.py +0 -0
  46. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/openai_client_service.py +0 -0
  47. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/response_accumulator.py +0 -0
  48. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/services/thread_service.py +0 -0
  49. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/backend/system_prompt_cache.py +0 -0
  50. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/__init__.py +0 -0
  51. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/composer_key_handler.py +0 -0
  52. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/mcp_oauth.py +0 -0
  53. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/message.py +0 -0
  54. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/system_prompt_editor.py +0 -0
  55. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/threadlist.py +0 -0
  56. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/components/tools_modal.py +0 -0
  57. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/configuration.py +0 -0
  58. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/pages.py +0 -0
  59. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/mcp_oauth_state.py +0 -0
  60. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/system_prompt_state.py +0 -0
  61. {appkit_assistant-1.0.3 → appkit_assistant-1.0.4}/src/appkit_assistant/state/thread_list_state.py +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: appkit-assistant
-Version: 1.0.3
+Version: 1.0.4
 Summary: Add your description here
 Project-URL: Homepage, https://github.com/jenreh/appkit
 Project-URL: Documentation, https://github.com/jenreh/appkit/tree/main/docs
@@ -20,6 +20,7 @@ Requires-Dist: anthropic>=0.77.0
 Requires-Dist: appkit-commons
 Requires-Dist: appkit-mantine
 Requires-Dist: appkit-ui
+Requires-Dist: appkit-user
 Requires-Dist: apscheduler>=3.11.2
 Requires-Dist: google-genai>=1.60.0
 Requires-Dist: mcp>=1.26.0

pyproject.toml

@@ -5,6 +5,7 @@ dependencies = [
     "appkit-commons",
     "appkit-mantine",
     "appkit-ui",
+    "appkit_user",
     "google-genai>=1.60.0",
     "mcp>=1.26.0",
     "openai>=2.16.0",
@@ -12,7 +13,7 @@ dependencies = [
     "python-multipart>=0.0.22",
 ]
 name = "appkit-assistant"
-version = "1.0.3"
+version = "1.0.4"
 description = "Add your description here"
 readme = "README.md"
 authors = [{ name = "Jens Rehpöhler" }]

src/appkit_assistant/backend/database/models.py

@@ -67,6 +67,7 @@ class MCPServer(rx.Model, table=True):
         default=None, sa_column=Column(DateTime(timezone=True), nullable=True)
     )
     active: bool = Field(default=True, nullable=False)
+    required_role: str | None = Field(default=None, nullable=True)


 class SystemPrompt(rx.Model, table=True):
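
The new `required_role` column is presumably what the mcp_server_dialogs.py and mcp_server_table.py changes listed above build on. A minimal visibility-query sketch (hypothetical helper, not part of this release; it assumes the SQLAlchemy-style `select` the package's repositories already use and treats a NULL `required_role` as visible to everyone):

from sqlalchemy import or_, select

from appkit_assistant.backend.database.models import MCPServer


def visible_mcp_servers_stmt(user_roles: set[str]):
    # Servers with no required_role stay visible; otherwise the user must
    # hold the named role.
    return select(MCPServer).where(
        MCPServer.active.is_(True),
        or_(
            MCPServer.required_role.is_(None),
            MCPServer.required_role.in_(user_roles),
        ),
    )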

src/appkit_assistant/backend/database/repositories.py

@@ -168,6 +168,40 @@ class ThreadRepository(BaseRepository[AssistantThread, AsyncSession]):
         result = await session.execute(stmt)
         return list(result.scalars().all())

+    async def find_unique_vector_store_ids(self, session: AsyncSession) -> list[str]:
+        """Get unique vector store IDs from all threads.
+
+        Returns:
+            List of unique vector store IDs (excluding None/empty).
+        """
+        stmt = select(AssistantThread.vector_store_id).distinct()
+        result = await session.execute(stmt)
+        return [row[0] for row in result.all() if row[0]]
+
+    async def clear_vector_store_id(
+        self, session: AsyncSession, vector_store_id: str
+    ) -> int:
+        """Clear vector_store_id from all threads referencing the given store.
+
+        Args:
+            vector_store_id: The vector store ID to clear from threads.
+
+        Returns:
+            Number of threads updated.
+        """
+        stmt = select(AssistantThread).where(
+            AssistantThread.vector_store_id == vector_store_id
+        )
+        result = await session.execute(stmt)
+        threads = list(result.scalars().all())
+
+        for thread in threads:
+            thread.vector_store_id = None
+            session.add(thread)
+
+        await session.flush()
+        return len(threads)
+

 class FileUploadRepository(BaseRepository[AssistantFileUpload, AsyncSession]):
     """Repository class for file upload database operations."""

src/appkit_assistant/backend/models/anthropic.py

@@ -7,6 +7,10 @@ from typing import Final
 from appkit_assistant.backend.schemas import (
     AIModel,
 )
+from appkit_assistant.roles import (
+    ASSISTANT_ADVANCED_MODELS_ROLE,
+    ASSISTANT_BASIC_MODELS_ROLE,
+)

 CLAUDE_HAIKU_4_5: Final = AIModel(
     id="claude-haiku-4.5",
@@ -17,6 +21,8 @@ CLAUDE_HAIKU_4_5: Final = AIModel(
     supports_attachments=False,
     supports_tools=True,
     temperature=1.0,
+    keywords=["haiku", "claude"],
+    requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
 )

 CLAUDE_SONNET_4_5: Final = AIModel(
@@ -28,4 +34,6 @@ CLAUDE_SONNET_4_5: Final = AIModel(
     supports_attachments=False,
     supports_tools=True,
     temperature=1.0,
+    keywords=["sonnet", "claude"],
+    requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
 )

src/appkit_assistant/backend/models/google.py

@@ -5,6 +5,10 @@ Gemini model definitions for Google's GenAI API.
 from typing import Final

 from appkit_assistant.backend.schemas import AIModel
+from appkit_assistant.roles import (
+    ASSISTANT_ADVANCED_MODELS_ROLE,
+    ASSISTANT_BASIC_MODELS_ROLE,
+)

 GEMINI_3_PRO: Final = AIModel(
     id="gemini-3-pro-preview",
@@ -14,6 +18,8 @@ GEMINI_3_PRO: Final = AIModel(
     stream=True,
     supports_attachments=False,
     supports_tools=True,
+    keywords=["pro", "gemini"],
+    requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
 )

 GEMINI_3_FLASH: Final = AIModel(
@@ -24,4 +30,6 @@ GEMINI_3_FLASH: Final = AIModel(
     stream=True,
     supports_attachments=False,
     supports_tools=True,
+    keywords=["flash", "gemini"],
+    requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
 )

src/appkit_assistant/backend/models/openai.py

@@ -1,6 +1,10 @@
 from typing import Final

 from appkit_assistant.backend.schemas import AIModel
+from appkit_assistant.roles import (
+    ASSISTANT_ADVANCED_MODELS_ROLE,
+    ASSISTANT_BASIC_MODELS_ROLE,
+)

 O3: Final = AIModel(
     id="o3",
@@ -11,6 +15,8 @@ O3: Final = AIModel(
     stream=True,
     supports_attachments=False,
     supports_tools=True,
+    keywords=["reasoning", "o3"],
+    requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
 )

 GPT_5_MINI: Final = AIModel(
@@ -23,6 +29,8 @@ GPT_5_MINI: Final = AIModel(
     supports_tools=True,
     supports_search=True,
     temperature=1,
+    keywords=["gpt-5", "mini"],
+    requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
 )

 GPT_5_1: Final = AIModel(
@@ -35,6 +43,8 @@ GPT_5_1: Final = AIModel(
     supports_tools=True,
     supports_search=True,
     temperature=1,
+    keywords=["gpt-5", "5.1"],
+    requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
 )

 GPT_5_2: Final = AIModel(
@@ -47,4 +57,6 @@ GPT_5_2: Final = AIModel(
     supports_tools=True,
     supports_search=True,
     temperature=1,
+    keywords=["gpt-5", "5.2"],
+    requires_role=ASSISTANT_ADVANCED_MODELS_ROLE.name,
 )

src/appkit_assistant/backend/models/perplexity.py

@@ -1,6 +1,7 @@
 import enum

 from appkit_assistant.backend.schemas import AIModel
+from appkit_assistant.roles import ASSISTANT_PERPLEXITY_MODEL_ROLE


 class ContextSize(enum.StrEnum):
@@ -24,6 +25,8 @@ SONAR = PerplexityAIModel(
     icon="perplexity",
     model="sonar",
     stream=True,
+    keywords=["sonar", "perplexity"],
+    requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
 )

 SONAR_PRO = PerplexityAIModel(
@@ -33,6 +36,7 @@ SONAR_PRO = PerplexityAIModel(
     model="sonar-pro",
     stream=True,
     keywords=["sonar", "perplexity"],
+    requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
 )

 SONAR_DEEP_RESEARCH = PerplexityAIModel(
@@ -43,6 +47,7 @@ SONAR_DEEP_RESEARCH = PerplexityAIModel(
     search_context_size=ContextSize.HIGH,
     stream=True,
     keywords=["reasoning", "deep", "research", "perplexity"],
+    requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
 )

 SONAR_REASONING = PerplexityAIModel(
@@ -53,4 +58,5 @@ SONAR_REASONING = PerplexityAIModel(
     search_context_size=ContextSize.HIGH,
     stream=True,
     keywords=["reasoning", "perplexity"],
+    requires_role=ASSISTANT_PERPLEXITY_MODEL_ROLE.name,
 )
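
Every built-in model definition now carries `keywords` plus a `requires_role` name from the new `appkit_assistant.roles` module. The real gating lives in roles.py and the state classes (not shown in this excerpt); a filtering sketch under the assumption that the caller already holds a plain set of the user's role names:

from collections.abc import Iterable

from appkit_assistant.backend.schemas import AIModel


def models_for_user(models: Iterable[AIModel], user_roles: set[str]) -> list[AIModel]:
    # Sketch only: models without a requires_role are treated as unrestricted.
    return [
        model
        for model in models
        if not getattr(model, "requires_role", None)
        or model.requires_role in user_roles
    ]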

src/appkit_assistant/backend/processors/lorem_ipsum_processor.py

@@ -18,6 +18,7 @@ from appkit_assistant.backend.schemas import (
     ChunkType,
     Message,
 )
+from appkit_assistant.roles import ASSISTANT_BASIC_MODELS_ROLE

 logger = logging.getLogger(__name__)

@@ -43,6 +44,8 @@ LOREM_MODELS = {
         stream=True,
         supports_attachments=True,
         supports_tools=True,
+        keywords=["lorem", "ipsum", "short"],
+        requires_role=ASSISTANT_BASIC_MODELS_ROLE.name,
     )
 }

src/appkit_assistant/backend/processors/openai_responses_processor.py

@@ -462,7 +462,8 @@ class OpenAIResponsesProcessor(StreamingProcessorBase, MCPCapabilities):
             reasoning_session=self.current_reasoning_session,
         )

-        # file_search_call / web_search_call done events are handled in _handle_search_events
+        # file_search_call / web_search_call done events are handled in
+        # _handle_search_events
         if item.type in ("file_search_call", "web_search_call"):
            return None

src/appkit_assistant/backend/services/auth_error_detector.py

@@ -93,7 +93,7 @@ def get_auth_error_detector() -> AuthErrorDetector:
     Returns:
         The AuthErrorDetector instance
     """
-    global _auth_error_detector
+    global _auth_error_detector  # noqa: PLW0603
     if _auth_error_detector is None:
         _auth_error_detector = AuthErrorDetector()
     return _auth_error_detector

src/appkit_assistant/backend/services/file_cleanup_service.py

@@ -14,11 +14,10 @@ from typing import Any
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from apscheduler.triggers.interval import IntervalTrigger
 from openai import AsyncOpenAI, NotFoundError
-from sqlalchemy import select

-from appkit_assistant.backend.database.models import (
-    AssistantFileUpload,
-    AssistantThread,
+from appkit_assistant.backend.database.repositories import (
+    file_upload_repo,
+    thread_repo,
 )
 from appkit_assistant.backend.services.file_upload_service import FileUploadService
 from appkit_assistant.backend.services.openai_client_service import (
@@ -76,6 +75,8 @@ class FileCleanupService:
             "vector_stores_checked": 0,
             "vector_stores_expired": 0,
             "vector_stores_deleted": 0,
+            "files_found": 0,
+            "files_deleted": 0,
             "threads_updated": 0,
             "current_vector_store": None,
             "total_vector_stores": 0,
@@ -83,12 +84,19 @@
         }

         try:
-            # Get all unique vector store IDs from file uploads
+            # Get all unique vector store IDs from BOTH file uploads AND threads
             async with get_asyncdb_session() as session:
-                result = await session.execute(
-                    select(AssistantFileUpload.vector_store_id).distinct()
+                # Vector stores from file uploads
+                file_stores = await file_upload_repo.find_unique_vector_stores(session)
+                file_store_ids = {store_id for store_id, _ in file_stores if store_id}
+
+                # Vector stores from threads (may have orphaned references)
+                thread_store_ids = set(
+                    await thread_repo.find_unique_vector_store_ids(session)
                 )
-                vector_store_ids = [row[0] for row in result.all() if row[0]]
+
+                # Combine both sets
+                vector_store_ids = list(file_store_ids | thread_store_ids)

             stats["total_vector_stores"] = len(vector_store_ids)
             stats["status"] = "checking"
@@ -111,9 +119,11 @@
                 yield stats.copy()

                 # Delegate cleanup to FileUploadService
-                deleted = await self._file_upload_service.delete_vector_store(vs_id)
-                if deleted:
+                result = await self._file_upload_service.delete_vector_store(vs_id)
+                if result["deleted"]:
                     stats["vector_stores_deleted"] += 1
+                    stats["files_found"] += result["files_found"]
+                    stats["files_deleted"] += result["files_deleted"]
                     # Clear vector_store_id from associated threads
                     threads_updated = await self._clear_thread_vector_store_ids(vs_id)
                     stats["threads_updated"] += threads_updated
@@ -142,12 +152,24 @@
             True if the vector store is expired/deleted, False otherwise.
         """
         try:
-            await self._client.vector_stores.retrieve(vector_store_id=vector_store_id)
+            vector_store = await self._client.vector_stores.retrieve(
+                vector_store_id=vector_store_id
+            )
+            # Check if the vector store has expired status
+            if vector_store.status == "expired":
+                logger.info(
+                    "Vector store %s has expired status",
+                    vector_store_id,
+                )
+                return True
+            return False
         except NotFoundError:
+            logger.info(
+                "Vector store %s not found (deleted)",
+                vector_store_id,
+            )
             return True

-        return False
-
     async def _clear_thread_vector_store_ids(self, vector_store_id: str) -> int:
         """Clear vector_store_id from all threads associated with the store.

@@ -157,25 +179,16 @@
         Returns:
             Number of threads updated.
         """
-        updated_count = 0
         async with get_asyncdb_session() as session:
-            thread_result = await session.execute(
-                select(AssistantThread).where(
-                    AssistantThread.vector_store_id == vector_store_id
-                )
+            updated_count = await thread_repo.clear_vector_store_id(
+                session, vector_store_id
             )
-            threads = list(thread_result.scalars().all())
-
-            for thread in threads:
-                thread.vector_store_id = None
-                session.add(thread)
-                updated_count += 1
-                logger.debug(
-                    "Cleared vector_store_id from thread %s",
-                    thread.thread_id,
-                )
-
             await session.commit()
+            logger.debug(
+                "Cleared vector_store_id from %d threads for store %s",
+                updated_count,
+                vector_store_id,
+            )

         return updated_count
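
The per-run `stats` dict now also reports file counts, and the cleanup generator yields a copy of it after each vector store. A consumer sketch (the generator method name below is a placeholder; use whatever method on `FileCleanupService` yields these dicts):

from appkit_assistant.backend.services.file_cleanup_service import FileCleanupService


async def report_cleanup(service: FileCleanupService) -> None:
    # cleanup_expired_stores is an assumed name for the async generator
    # that yields the stats dicts shown in the hunks above.
    async for stats in service.cleanup_expired_stores():
        print(
            f"{stats['vector_stores_checked']}/{stats['total_vector_stores']} stores, "
            f"{stats['files_deleted']}/{stats['files_found']} files deleted, "
            f"{stats['threads_updated']} threads updated"
        )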
 

src/appkit_assistant/backend/services/file_upload_service.py

@@ -426,19 +426,13 @@ class FileUploadService:
             ) from last_error

     async def _delete_files_from_vector_stores(
-        self, db_files: list[AssistantFileUpload]
+        self, vector_store_files: dict[str, list[str]]
     ) -> None:
-        """Delete files FROM their vector stores (Level 1)."""
-        # Build map of vector_store_id -> file_ids
-        vector_store_files: dict[str, list[str]] = {}
-        for db_file in db_files:
-            if db_file.vector_store_id:
-                if db_file.vector_store_id not in vector_store_files:
-                    vector_store_files[db_file.vector_store_id] = []
-                vector_store_files[db_file.vector_store_id].append(
-                    db_file.openai_file_id
-                )
+        """Delete files FROM their vector stores (Level 1).

+        Args:
+            vector_store_files: Map of vector_store_id -> list of file_ids.
+        """
         # Delete from each vector store
         for vs_id, vs_file_ids in vector_store_files.items():
             for file_id in vs_file_ids:
@@ -471,28 +465,36 @@

     async def _delete_file_db_records(
         self,
-        db_files: list[AssistantFileUpload],
-        deletion_results: dict[str, bool],
+        openai_file_ids: list[str],
     ) -> None:
-        """Delete database records for successfully deleted files (Level 3)."""
-        deleted_file_ids = [fid for fid, success in deletion_results.items() if success]
-        if not deleted_file_ids:
+        """Delete database records for files by their OpenAI file IDs (Level 3).
+
+        Args:
+            openai_file_ids: List of OpenAI file IDs to delete from database.
+        """
+        if not openai_file_ids:
             return

         async with get_asyncdb_session() as session:
+            result = await session.execute(
+                select(AssistantFileUpload).where(
+                    AssistantFileUpload.openai_file_id.in_(openai_file_ids)
+                )
+            )
+            db_files = result.scalars().all()
+
             for db_file in db_files:
-                if db_file.openai_file_id in deleted_file_ids:
-                    try:
-                        await session.delete(db_file)
-                        logger.debug(
-                            "Deleted DB record for file: %s", db_file.openai_file_id
-                        )
-                    except Exception as e:
-                        logger.warning(
-                            "Failed to delete DB record for file %s: %s",
-                            db_file.openai_file_id,
-                            e,
-                        )
+                try:
+                    await session.delete(db_file)
+                    logger.debug(
+                        "Deleted DB record for file: %s", db_file.openai_file_id
+                    )
+                except Exception as e:
+                    logger.warning(
+                        "Failed to delete DB record for file %s: %s",
+                        db_file.openai_file_id,
+                        e,
+                    )
             await session.commit()

     async def upload_file(
@@ -524,7 +526,8 @@
         file_size = path.stat().st_size
         if file_size > self._max_file_size_bytes:
             raise FileUploadError(
-                f"Datei überschreitet die maximale Größe von {self.config.max_file_size_mb}MB"
+                "Datei überschreitet die maximale Größe von "
+                f"{self.config.max_file_size_mb}MB"
             )

         # Validate file count for thread
@@ -769,84 +772,117 @@
         if not file_ids:
             return {}

-        # Get file records from database to know which vector stores they belong to
+        # Get file records and extract needed data within session
+        vector_store_files: dict[str, list[str]] = {}
         async with get_asyncdb_session() as session:
             file_records = await session.execute(
                 select(AssistantFileUpload).where(
                     AssistantFileUpload.openai_file_id.in_(file_ids)
                 )
             )
-            db_files = file_records.scalars().all()
+            for db_file in file_records.scalars().all():
+                if db_file.vector_store_id:
+                    if db_file.vector_store_id not in vector_store_files:
+                        vector_store_files[db_file.vector_store_id] = []
+                    vector_store_files[db_file.vector_store_id].append(
+                        db_file.openai_file_id
+                    )

         # LEVEL 1: Delete files FROM their vector stores
-        await self._delete_files_from_vector_stores(db_files)
+        await self._delete_files_from_vector_stores(vector_store_files)

         # LEVEL 2: Delete files from OpenAI
         results = await self._delete_files_from_openai(file_ids)

         # LEVEL 3: Delete database records (only for successfully deleted files)
-        await self._delete_file_db_records(db_files, results)
+        deleted_file_ids = [fid for fid, success in results.items() if success]
+        await self._delete_file_db_records(deleted_file_ids)

         return results

-    async def delete_vector_store(self, vector_store_id: str) -> bool:
+    async def delete_vector_store(self, vector_store_id: str) -> dict[str, Any]:
         """Delete a vector store with proper ordering.

         Order:
-        1. Delete all files in the vector store (via delete_files - 3-level deletion)
-        2. Delete the vector store container itself
+        1. Get file IDs from database (reliable even for expired stores)
+        2. Try to get file IDs from OpenAI vector store (may fail if expired)
+        3. Delete all files (3-level deletion)
+        4. Delete the vector store container itself
+        5. Clean up database records

         Args:
             vector_store_id: The vector store ID to delete.

         Returns:
-            True if vector store was successfully deleted, False otherwise.
+            Dict with 'deleted' (bool), 'files_found' (int), 'files_deleted' (int).
         """
+        result = {"deleted": False, "files_found": 0, "files_deleted": 0}
+
         if not vector_store_id:
-            return False
+            return result

         logger.info("Deleting vector store: %s", vector_store_id)

-        # Step 1: List and delete all files in the vector store
+        # Step 1: Get file IDs from database (reliable even for expired stores)
+        db_file_ids: set[str] = set()
+        async with get_asyncdb_session() as session:
+            db_files = await file_upload_repo.find_by_vector_store(
+                session, vector_store_id
+            )
+            db_file_ids = {f.openai_file_id for f in db_files}
+
+        # Step 2: Try to get file IDs from OpenAI (may fail for expired stores)
+        openai_file_ids: set[str] = set()
         try:
             vs_files = await self.client.vector_stores.files.list(
                 vector_store_id=vector_store_id
             )
-            file_ids = [vs_file.id for vs_file in vs_files.data]
-
-            if file_ids:
-                logger.info(
-                    "Deleting %d files from vector store %s",
-                    len(file_ids),
-                    vector_store_id,
-                )
-                deletion_results = await self.delete_files(file_ids)
-                successful = sum(1 for success in deletion_results.values() if success)
-                logger.info(
-                    "Successfully deleted %d/%d files from vector store %s",
-                    successful,
-                    len(file_ids),
-                    vector_store_id,
-                )
+            openai_file_ids = {vs_file.id for vs_file in vs_files.data}
         except Exception as e:
             logger.warning(
-                "Failed to delete files from vector store %s: %s",
+                "Could not list files from vector store %s (may be expired): %s",
                 vector_store_id,
                 e,
             )

-        # Step 2: Delete the vector store container itself
+        # Merge file IDs from both sources
+        all_file_ids = list(db_file_ids | openai_file_ids)
+        result["files_found"] = len(all_file_ids)
+
+        # Step 3: Delete all files (3-level deletion)
+        if all_file_ids:
+            logger.info(
+                "Deleting %d files from vector store %s (db: %d, openai: %d)",
+                len(all_file_ids),
+                vector_store_id,
+                len(db_file_ids),
+                len(openai_file_ids),
+            )
+            deletion_results = await self.delete_files(all_file_ids)
+            successful = sum(1 for success in deletion_results.values() if success)
+            result["files_deleted"] = successful
+            logger.info(
+                "Successfully deleted %d/%d files from vector store %s",
+                successful,
+                len(all_file_ids),
+                vector_store_id,
+            )
+
+        # Step 4: Delete the vector store container itself
         try:
             await self.client.vector_stores.delete(vector_store_id=vector_store_id)
             logger.info("Deleted vector store: %s", vector_store_id)
-            return True
+            result["deleted"] = True
+            return result
         except Exception as e:
             logger.warning(
-                "Failed to delete vector store %s (will auto-expire): %s",
+                "Failed to delete vector store %s (may already be expired): %s",
                 vector_store_id,
                 e,
             )
-            return False
+            # Still return True if files were cleaned up - store may auto-expire
+            result["deleted"] = len(all_file_ids) > 0
+            return result

     async def cleanup_deleted_thread(
         self,
@@ -887,10 +923,10 @@
             return result

         # Delete vector store (which handles all file deletion internally)
-        vs_deleted = await self.delete_vector_store(vector_store_id)
-        result["vector_store_deleted"] = vs_deleted
+        vs_result = await self.delete_vector_store(vector_store_id)
+        result["vector_store_deleted"] = vs_result["deleted"]

-        if not vs_deleted:
+        if not vs_result["deleted"]:
             result["errors"].append(f"Failed to delete vector store {vector_store_id}")

         logger.info(
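
`delete_vector_store` no longer returns a bare bool, so any caller outside this diff needs a small adaptation; a sketch (hypothetical wrapper, assuming only the result keys documented in the docstring above):

import logging

from appkit_assistant.backend.services.file_upload_service import FileUploadService

logger = logging.getLogger(__name__)


async def remove_store(service: FileUploadService, vector_store_id: str) -> bool:
    result = await service.delete_vector_store(vector_store_id)
    if not result["deleted"]:
        logger.warning(
            "Vector store %s not deleted (%d/%d files cleaned up)",
            vector_store_id,
            result["files_deleted"],
            result["files_found"],
        )
    return result["deleted"]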

src/appkit_assistant/backend/services/file_validation.py

@@ -132,7 +132,7 @@ def get_file_validation_service() -> FileValidationService:
     Returns:
         The FileValidationService instance
     """
-    global _file_validation_service
+    global _file_validation_service  # noqa: PLW0603
     if _file_validation_service is None:
         _file_validation_service = FileValidationService()
     return _file_validation_service

src/appkit_assistant/backend/services/system_prompt_builder.py

@@ -83,7 +83,7 @@ def get_system_prompt_builder() -> SystemPromptBuilder:
     Returns:
         The SystemPromptBuilder instance
     """
-    global _system_prompt_builder
+    global _system_prompt_builder  # noqa: PLW0603
     if _system_prompt_builder is None:
         _system_prompt_builder = SystemPromptBuilder()
     return _system_prompt_builder
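
The three `# noqa: PLW0603` additions all annotate the same lazily initialized module singleton; the pattern, with hypothetical names standing in for AuthErrorDetector, FileValidationService, and SystemPromptBuilder, is:

class ExampleService:
    """Stand-in for the services returned by the getters above."""


_service: ExampleService | None = None


def get_example_service() -> ExampleService:
    """Create the singleton on first use and reuse it afterwards."""
    global _service  # noqa: PLW0603  (intentional module-level rebinding)
    if _service is None:
        _service = ExampleService()
    return _service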