appkit-assistant 0.17.3-py3-none-any.whl → 1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. appkit_assistant/backend/{models.py → database/models.py} +32 -132
  2. appkit_assistant/backend/{repositories.py → database/repositories.py} +93 -1
  3. appkit_assistant/backend/model_manager.py +5 -5
  4. appkit_assistant/backend/models/__init__.py +28 -0
  5. appkit_assistant/backend/models/anthropic.py +31 -0
  6. appkit_assistant/backend/models/google.py +27 -0
  7. appkit_assistant/backend/models/openai.py +50 -0
  8. appkit_assistant/backend/models/perplexity.py +56 -0
  9. appkit_assistant/backend/processors/__init__.py +29 -0
  10. appkit_assistant/backend/processors/claude_responses_processor.py +205 -387
  11. appkit_assistant/backend/processors/gemini_responses_processor.py +231 -299
  12. appkit_assistant/backend/processors/lorem_ipsum_processor.py +6 -4
  13. appkit_assistant/backend/processors/mcp_mixin.py +297 -0
  14. appkit_assistant/backend/processors/openai_base.py +11 -125
  15. appkit_assistant/backend/processors/openai_chat_completion_processor.py +5 -3
  16. appkit_assistant/backend/processors/openai_responses_processor.py +480 -402
  17. appkit_assistant/backend/processors/perplexity_processor.py +156 -79
  18. appkit_assistant/backend/{processor.py → processors/processor_base.py} +7 -2
  19. appkit_assistant/backend/processors/streaming_base.py +188 -0
  20. appkit_assistant/backend/schemas.py +138 -0
  21. appkit_assistant/backend/services/auth_error_detector.py +99 -0
  22. appkit_assistant/backend/services/chunk_factory.py +273 -0
  23. appkit_assistant/backend/services/citation_handler.py +292 -0
  24. appkit_assistant/backend/services/file_cleanup_service.py +316 -0
  25. appkit_assistant/backend/services/file_upload_service.py +903 -0
  26. appkit_assistant/backend/services/file_validation.py +138 -0
  27. appkit_assistant/backend/{mcp_auth_service.py → services/mcp_auth_service.py} +4 -2
  28. appkit_assistant/backend/services/mcp_token_service.py +61 -0
  29. appkit_assistant/backend/services/message_converter.py +289 -0
  30. appkit_assistant/backend/services/openai_client_service.py +120 -0
  31. appkit_assistant/backend/{response_accumulator.py → services/response_accumulator.py} +163 -1
  32. appkit_assistant/backend/services/system_prompt_builder.py +89 -0
  33. appkit_assistant/backend/services/thread_service.py +5 -3
  34. appkit_assistant/backend/system_prompt_cache.py +3 -3
  35. appkit_assistant/components/__init__.py +8 -4
  36. appkit_assistant/components/composer.py +59 -24
  37. appkit_assistant/components/file_manager.py +623 -0
  38. appkit_assistant/components/mcp_server_dialogs.py +12 -20
  39. appkit_assistant/components/mcp_server_table.py +12 -2
  40. appkit_assistant/components/message.py +119 -2
  41. appkit_assistant/components/thread.py +1 -1
  42. appkit_assistant/components/threadlist.py +4 -2
  43. appkit_assistant/components/tools_modal.py +37 -20
  44. appkit_assistant/configuration.py +12 -0
  45. appkit_assistant/state/file_manager_state.py +697 -0
  46. appkit_assistant/state/mcp_oauth_state.py +3 -3
  47. appkit_assistant/state/mcp_server_state.py +47 -2
  48. appkit_assistant/state/system_prompt_state.py +1 -1
  49. appkit_assistant/state/thread_list_state.py +99 -5
  50. appkit_assistant/state/thread_state.py +88 -9
  51. {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/METADATA +8 -6
  52. appkit_assistant-1.0.0.dist-info/RECORD +58 -0
  53. appkit_assistant/backend/processors/claude_base.py +0 -178
  54. appkit_assistant/backend/processors/gemini_base.py +0 -84
  55. appkit_assistant-0.17.3.dist-info/RECORD +0 -39
  56. /appkit_assistant/backend/{file_manager.py → services/file_manager.py} +0 -0
  57. {appkit_assistant-0.17.3.dist-info → appkit_assistant-1.0.0.dist-info}/WHEEL +0 -0
@@ -0,0 +1,697 @@
+ """State management for file manager in assistant administration."""
+
+ import logging
+ from collections.abc import AsyncGenerator
+ from datetime import UTC, datetime
+ from typing import Any, Final
+
+ import reflex as rx
+ from pydantic import BaseModel
+
+ from appkit_assistant.backend.database.repositories import file_upload_repo
+ from appkit_assistant.backend.services.file_cleanup_service import run_cleanup
+ from appkit_assistant.backend.services.openai_client_service import (
+     get_openai_client_service,
+ )
+ from appkit_commons.database.session import get_asyncdb_session
+ from appkit_user.authentication.backend.user_repository import user_repo
+
+ logger = logging.getLogger(__name__)
+
+ # Toast messages
+ ERROR_LOAD_STORES: Final[str] = "Fehler beim Laden der Vector Stores."
+ ERROR_LOAD_FILES: Final[str] = "Fehler beim Laden der Dateien."
+ ERROR_FILE_NOT_FOUND: Final[str] = "Datei nicht gefunden."
+ ERROR_DELETE_FAILED: Final[str] = "Datei konnte nicht gelöscht werden."
+ ERROR_DELETE_GENERAL: Final[str] = "Fehler beim Löschen der Datei."
+ ERROR_LOAD_OPENAI_FILES: Final[str] = "Fehler beim Laden der OpenAI-Dateien."
+ ERROR_DELETE_OPENAI_FILE: Final[str] = "Fehler beim Löschen der OpenAI-Datei."
+ ERROR_OPENAI_NOT_CONFIGURED: Final[str] = "OpenAI API Key ist nicht konfiguriert."
+ ERROR_DELETE_VECTOR_STORE: Final[str] = "Fehler beim Löschen des Vector Stores."
+ INFO_VECTOR_STORE_EXPIRED: Final[str] = (
+     "Vector Store ist abgelaufen und wurde bereinigt."
+ )
+ INFO_VECTOR_STORE_DELETED: Final[str] = "Vector Store wurde gelöscht."
+ INFO_CLEANUP_COMPLETED: Final[str] = "Bereinigung abgeschlossen."
+ ERROR_CLEANUP_FAILED: Final[str] = "Fehler bei der Bereinigung."
+
+ # File size constants
+ KB: Final[int] = 1024
+ MB: Final[int] = 1024 * 1024
+ GB: Final[int] = 1024 * 1024 * 1024
+
+
+ def format_file_size_for_display(size_bytes: int) -> tuple[float, str]:
+     """Format file size to appropriate unit and return (formatted_value, suffix).
+
+     Args:
+         size_bytes: File size in bytes.
+
+     Returns:
+         Tuple of (formatted_value, suffix) e.g., (2.5, " MB")
+     """
+     if size_bytes >= GB:
+         return (size_bytes / GB, " GB")
+     if size_bytes >= MB:
+         return (size_bytes / MB, " MB")
+     if size_bytes >= KB:
+         return (size_bytes / KB, " KB")
+     return (float(size_bytes), " B")
+
+
+ def _format_unix_timestamp(timestamp: int | None) -> str:
+     """Format a Unix timestamp to a human-readable date string.
+
+     Args:
+         timestamp: Unix timestamp in seconds, or None.
+
+     Returns:
+         Formatted date string or "-" if timestamp is None/invalid.
+     """
+     if timestamp is None:
+         return "-"
+     try:
+         # Convert UTC timestamp to local time for display
+         dt = datetime.fromtimestamp(timestamp, tz=UTC).astimezone()
+         return dt.strftime("%d.%m.%Y %H:%M")
+     except (ValueError, OSError, TypeError):
+         return "-"
+
+
+ class FileInfo(BaseModel):
+     """Model for file information displayed in the table."""
+
+     id: int
+     filename: str
+     created_at: str
+     user_name: str
+     file_size: int
+     formatted_size: float
+     size_suffix: str
+     openai_file_id: str
+
+
+ class OpenAIFileInfo(BaseModel):
+     """Model for OpenAI file information."""
+
+     openai_id: str
+     filename: str
+     created_at: str
+     expires_at: str
+     purpose: str
+     file_size: int
+     formatted_size: float
+     size_suffix: str
+
+
+ class VectorStoreInfo(BaseModel):
+     """Model for vector store information."""
+
+     store_id: str
+     name: str
+
+
+ class CleanupStats(BaseModel):
+     """Model for cleanup progress statistics."""
+
+     status: str = "idle"  # idle, starting, checking, deleting, completed, error
+     vector_stores_checked: int = 0
+     vector_stores_expired: int = 0
+     vector_stores_deleted: int = 0
+     threads_updated: int = 0
+     current_vector_store: str | None = None
+     total_vector_stores: int = 0
+     error: str | None = None
+
+
+ class FileManagerState(rx.State):
+     """State class for managing uploaded files in vector stores."""
+
+     vector_stores: list[VectorStoreInfo] = []
+     selected_vector_store_id: str = ""
+     selected_vector_store_name: str = ""
+     files: list[FileInfo] = []
+     openai_files: list[OpenAIFileInfo] = []
+     loading: bool = False
+     deleting_file_id: int | None = None
+     deleting_openai_file_id: str | None = None
+     deleting_vector_store_id: str | None = None
+
+     # Cleanup state
+     cleanup_modal_open: bool = False
+     cleanup_running: bool = False
+     cleanup_stats: CleanupStats = CleanupStats()
+
+     def _get_file_by_id(self, file_id: int) -> FileInfo | None:
+         """Get a file by ID from the current files list."""
+         return next((f for f in self.files if f.id == file_id), None)
+
+     def _get_openai_file_by_id(self, openai_id: str) -> OpenAIFileInfo | None:
+         """Get an OpenAI file by ID from the current OpenAI files list."""
+         return next((f for f in self.openai_files if f.openai_id == openai_id), None)
+
+     async def on_tab_change(self, tab_value: str) -> AsyncGenerator[Any, Any]:
+         """Handle tab change events."""
+         if tab_value == "openai_files":
+             yield FileManagerState.load_openai_files
+         else:
+             yield FileManagerState.load_vector_stores
+
+     async def load_vector_stores(self) -> AsyncGenerator[Any, Any]:
+         """Load all unique vector stores from the database."""
+         self.loading = True
+         yield
+         try:
+             async with get_asyncdb_session() as session:
+                 stores = await file_upload_repo.find_unique_vector_stores(session)
+                 self.vector_stores = [
+                     VectorStoreInfo(store_id=store_id, name=name)
+                     for store_id, name in stores
+                 ]
+
+             logger.debug("Loaded %d vector stores", len(self.vector_stores))
+
+             # If no vector stores exist, clear selection and files
+             if not self.vector_stores:
+                 self.selected_vector_store_id = ""
+                 self.selected_vector_store_name = ""
+                 self.files = []
+                 return
+
+             # Check if currently selected store still exists
+             store_ids = {s.store_id for s in self.vector_stores}
+             if self.selected_vector_store_id and (
+                 self.selected_vector_store_id not in store_ids
+             ):
+                 # Selected store no longer exists, clear it
+                 self.selected_vector_store_id = ""
+                 self.selected_vector_store_name = ""
+                 self.files = []
+
+         except Exception as e:
+             logger.error("Failed to load vector stores: %s", e)
+             yield rx.toast.error(
+                 ERROR_LOAD_STORES,
+                 position="top-right",
+             )
+         finally:
+             self.loading = False
+
+     async def delete_vector_store(self, store_id: str) -> AsyncGenerator[Any, Any]:
+         """Delete a vector store and all its associated files.
+
+         Deletes the vector store from OpenAI, all associated files from OpenAI,
+         and removes all database records.
+
+         Args:
+             store_id: The ID of the vector store to delete.
+         """
+         self.deleting_vector_store_id = store_id
+         yield
+         try:
+             openai_service = get_openai_client_service()
+             if not openai_service.is_available:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             client = openai_service.create_client()
+             if not client:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             # Get files from DB to know which OpenAI files to delete
+             async with get_asyncdb_session() as session:
+                 files = await file_upload_repo.find_by_vector_store(session, store_id)
+                 openai_file_ids = [f.openai_file_id for f in files]
+
+                 # Delete vector store from OpenAI
+                 try:
+                     await client.vector_stores.delete(vector_store_id=store_id)
+                     logger.info("Deleted vector store from OpenAI: %s", store_id)
+                 except Exception as e:
+                     error_msg = str(e).lower()
+                     if "not found" not in error_msg and "404" not in error_msg:
+                         logger.error(
+                             "Failed to delete vector store %s from OpenAI: %s",
+                             store_id,
+                             e,
+                         )
+
+                 # Delete files from OpenAI
+                 for file_id in openai_file_ids:
+                     try:
+                         await client.files.delete(file_id=file_id)
+                         logger.debug("Deleted file from OpenAI: %s", file_id)
+                     except Exception as e:
+                         logger.warning(
+                             "Failed to delete file %s from OpenAI: %s",
+                             file_id,
+                             e,
+                         )
+
+                 # Delete records from database
+                 await file_upload_repo.delete_by_vector_store(session, store_id)
+                 await session.commit()
+                 logger.info(
+                     "Deleted %d files for vector store %s",
+                     len(files),
+                     store_id,
+                 )
+
+             # Reset selection if this was the selected store
+             if self.selected_vector_store_id == store_id:
+                 self.selected_vector_store_id = ""
+                 self.selected_vector_store_name = ""
+                 self.files = []
+
+             # Remove from local list
+             self.vector_stores = [
+                 s for s in self.vector_stores if s.store_id != store_id
+             ]
+
+             yield rx.toast.success(
+                 INFO_VECTOR_STORE_DELETED,
+                 position="top-right",
+             )
+
+         except Exception as e:
+             logger.error("Failed to delete vector store %s: %s", store_id, e)
+             yield rx.toast.error(
+                 ERROR_DELETE_VECTOR_STORE,
+                 position="top-right",
+             )
+         finally:
+             self.deleting_vector_store_id = None
+
+     async def select_vector_store(
+         self, store_id: str, store_name: str = ""
+     ) -> AsyncGenerator[Any, Any]:
+         """Select a vector store and load its files.
+
+         Validates that the vector store exists in OpenAI. If expired/deleted,
+         cleans up the database records and associated OpenAI files.
+         """
+         self.loading = True
+         yield
+         try:
+             # First validate the vector store exists in OpenAI
+             openai_service = get_openai_client_service()
+             if openai_service.is_available:
+                 client = openai_service.create_client()
+                 if client:
+                     try:
+                         await client.vector_stores.retrieve(store_id)
+                         logger.debug("Vector store %s exists in OpenAI", store_id)
+                     except Exception as e:
+                         # Vector store not found - clean up
+                         error_msg = str(e).lower()
+                         if "not found" in error_msg or "404" in error_msg:
+                             logger.info(
+                                 "Vector store %s expired/deleted, cleaning up",
+                                 store_id,
+                             )
+                             async for event in self._cleanup_expired_vector_store(
+                                 store_id
+                             ):
+                                 yield event
+                             return
+                         # Other error - log and continue
+                         logger.warning(
+                             "Error checking vector store %s: %s",
+                             store_id,
+                             e,
+                         )
+
+             self.selected_vector_store_id = store_id
+             self.selected_vector_store_name = store_name
+             async for event in self.load_files():
+                 yield event
+         finally:
+             self.loading = False
+
+     async def _cleanup_expired_vector_store(
+         self, store_id: str
+     ) -> AsyncGenerator[Any, Any]:
+         """Clean up an expired vector store: delete DB records and OpenAI files."""
+         try:
+             # Get files from DB to know which OpenAI files to delete
+             async with get_asyncdb_session() as session:
+                 files = await file_upload_repo.find_by_vector_store(session, store_id)
+                 openai_file_ids = [f.openai_file_id for f in files]
+
+                 # Delete files from OpenAI
+                 openai_service = get_openai_client_service()
+                 if openai_service.is_available:
+                     client = openai_service.create_client()
+                     if client:
+                         for file_id in openai_file_ids:
+                             try:
+                                 await client.files.delete(file_id=file_id)
+                                 logger.debug(
+                                     "Deleted expired file from OpenAI: %s", file_id
+                                 )
+                             except Exception as e:
+                                 logger.warning(
+                                     "Failed to delete file %s from OpenAI: %s",
+                                     file_id,
+                                     e,
+                                 )
+
+                 # Delete records from database
+                 await file_upload_repo.delete_by_vector_store(session, store_id)
+                 await session.commit()
+                 logger.info(
+                     "Cleaned up %d files for expired vector store %s",
+                     len(files),
+                     store_id,
+                 )
+
+             # Reset selection and reload
+             self.selected_vector_store_id = ""
+             self.selected_vector_store_name = ""
+             yield rx.toast.info(
+                 INFO_VECTOR_STORE_EXPIRED,
+                 position="top-right",
+             )
+             yield FileManagerState.load_vector_stores
+
+         except Exception as e:
+             logger.error("Failed to cleanup expired vector store %s: %s", store_id, e)
+             yield rx.toast.error(
+                 "Fehler beim Bereinigen des abgelaufenen Vector Stores.",
+                 position="top-right",
+             )
+
+     async def load_files(self) -> AsyncGenerator[Any, Any]:
+         """Load files for the selected vector store."""
+         if not self.selected_vector_store_id:
+             self.files = []
+             return
+
+         self.loading = True
+         yield
+         try:
+             # Cache for user names to avoid repeated queries
+             user_cache: dict[int, str] = {}
+
+             async with get_asyncdb_session() as session:
+                 file_uploads = await file_upload_repo.find_by_vector_store(
+                     session, self.selected_vector_store_id
+                 )
+
+                 files_list = []
+                 for upload in file_uploads:
+                     # Get user name from cache or database
+                     if upload.user_id not in user_cache:
+                         user = await user_repo.find_by_id(session, upload.user_id)
+                         user_cache[upload.user_id] = (
+                             user.name or user.email if user else "Unbekannt"
+                         )
+
+                     # Format file size for display
+                     formatted_size, size_suffix = format_file_size_for_display(
+                         upload.file_size
+                     )
+
+                     files_list.append(
+                         FileInfo(
+                             id=upload.id,
+                             filename=upload.filename,
+                             created_at=upload.created_at.strftime("%d.%m.%Y %H:%M"),
+                             user_name=user_cache[upload.user_id],
+                             file_size=upload.file_size,
+                             formatted_size=formatted_size,
+                             size_suffix=size_suffix,
+                             openai_file_id=upload.openai_file_id,
+                         )
+                     )
+
+             self.files = files_list
+
+             logger.debug(
+                 "Loaded %d files for vector store %s",
+                 len(self.files),
+                 self.selected_vector_store_id,
+             )
+
+         except Exception as e:
+             logger.error("Failed to load files: %s", e)
+             yield rx.toast.error(
+                 ERROR_LOAD_FILES,
+                 position="top-right",
+             )
+         finally:
+             self.loading = False
+
+     async def load_openai_files(self) -> AsyncGenerator[Any, Any]:
+         """Load files directly from OpenAI API."""
+         self.loading = True
+         yield
+         try:
+             openai_service = get_openai_client_service()
+             if not openai_service.is_available:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             client = openai_service.create_client()
+             if not client:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             # Fetch files from OpenAI
+             response = await client.files.list()
+             openai_files_list = []
+
+             for file in response.data:
+                 # Format file size for display (OpenAI uses 'bytes' attribute)
+                 formatted_size, size_suffix = format_file_size_for_display(file.bytes)
+
+                 # Convert Unix timestamp to formatted date
+                 created_at = _format_unix_timestamp(file.created_at)
+                 expires_at = _format_unix_timestamp(getattr(file, "expires_at", None))
+
+                 openai_files_list.append(
+                     OpenAIFileInfo(
+                         openai_id=file.id,
+                         filename=file.filename,
+                         created_at=created_at,
+                         expires_at=expires_at,
+                         purpose=file.purpose or "-",
+                         file_size=file.bytes,
+                         formatted_size=formatted_size,
+                         size_suffix=size_suffix,
+                     )
+                 )
+
+             self.openai_files = openai_files_list
+             logger.debug("Loaded %d files from OpenAI", len(self.openai_files))
+
+         except Exception as e:
+             logger.error("Failed to load OpenAI files: %s", e)
+             yield rx.toast.error(
+                 ERROR_LOAD_OPENAI_FILES,
+                 position="top-right",
+             )
+         finally:
+             self.loading = False
+
+     async def delete_file(self, file_id: int) -> AsyncGenerator[Any, Any]:
+         """Delete a file from OpenAI and the database."""
+         self.deleting_file_id = file_id
+         yield
+
+         try:
+             # Find the file to get OpenAI file ID
+             file_info = self._get_file_by_id(file_id)
+             if not file_info:
+                 yield rx.toast.error(
+                     ERROR_FILE_NOT_FOUND,
+                     position="top-right",
+                 )
+                 return
+
+             openai_file_id = file_info.openai_file_id
+             filename = file_info.filename
+
+             # Delete from OpenAI
+             try:
+                 openai_service = get_openai_client_service()
+                 if openai_service.is_available:
+                     client = openai_service.create_client()
+                     if client:
+                         await client.files.delete(file_id=openai_file_id)
+                         logger.debug("Deleted OpenAI file: %s", openai_file_id)
+                 else:
+                     logger.warning(
+                         "OpenAI API key not configured, skipping OpenAI deletion"
+                     )
+             except Exception as e:
+                 logger.warning(
+                     "Failed to delete file from OpenAI %s: %s",
+                     openai_file_id,
+                     e,
+                 )
+                 # Continue with DB deletion even if OpenAI deletion fails
+
+             # Delete from database
+             async with get_asyncdb_session() as session:
+                 deleted = await file_upload_repo.delete_file(session, file_id)
+                 if deleted:
+                     await session.commit()
+                     logger.debug("Deleted file record: %s", file_id)
+                 else:
+                     yield rx.toast.error(
+                         ERROR_DELETE_FAILED,
+                         position="top-right",
+                     )
+                     return
+
+             yield rx.toast.success(
+                 f"Datei '{filename}' wurde gelöscht.",
+                 position="top-right",
+             )
+
+             # Reload files
+             yield FileManagerState.load_files
+
+             # Check if vector store is now empty and reload stores
+             if not self.files:
+                 self.selected_vector_store_id = ""
+                 yield FileManagerState.load_vector_stores
+
+         except Exception as e:
+             logger.error("Failed to delete file %d: %s", file_id, e)
+             yield rx.toast.error(
+                 ERROR_DELETE_GENERAL,
+                 position="top-right",
+             )
+         finally:
+             self.deleting_file_id = None
+
+     async def delete_openai_file(self, openai_id: str) -> AsyncGenerator[Any, Any]:
+         """Delete a file directly from OpenAI API."""
+         self.deleting_openai_file_id = openai_id
+         yield
+
+         try:
+             file_info = self._get_openai_file_by_id(openai_id)
+             if not file_info:
+                 yield rx.toast.error(
+                     ERROR_FILE_NOT_FOUND,
+                     position="top-right",
+                 )
+                 return
+
+             filename = file_info.filename
+
+             openai_service = get_openai_client_service()
+             if not openai_service.is_available:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             client = openai_service.create_client()
+             if not client:
+                 yield rx.toast.error(
+                     ERROR_OPENAI_NOT_CONFIGURED,
+                     position="top-right",
+                 )
+                 return
+
+             await client.files.delete(file_id=openai_id)
+             logger.debug("Deleted OpenAI file: %s", openai_id)
+
+             yield rx.toast.success(
+                 f"Datei '{filename}' wurde von OpenAI gelöscht.",
+                 position="top-right",
+             )
+
+             # Reload OpenAI files
+             yield FileManagerState.load_openai_files
+
+         except Exception as e:
+             logger.error("Failed to delete OpenAI file %s: %s", openai_id, e)
+             yield rx.toast.error(
+                 ERROR_DELETE_OPENAI_FILE,
+                 position="top-right",
+             )
+         finally:
+             self.deleting_openai_file_id = None
+
+     def open_cleanup_modal(self) -> None:
+         """Open the cleanup modal and reset stats."""
+         self.cleanup_stats = CleanupStats()
+         self.cleanup_modal_open = True
+
+     def close_cleanup_modal(self) -> None:
+         """Close the cleanup modal."""
+         self.cleanup_modal_open = False
+
+     def set_cleanup_modal_open(self, is_open: bool) -> None:
+         """Set the cleanup modal open state.
+
+         Used by on_open_change handler which receives a boolean.
+         """
+         self.cleanup_modal_open = is_open
+
+     @rx.event(background=True)
+     async def start_cleanup(self) -> AsyncGenerator[Any, Any]:
+         """Start the cleanup process and track progress.
+
+         This is a background task that iterates through the run_cleanup()
+         async generator and updates the cleanup_stats for each progress update.
+         """
+         async with self:
+             self.cleanup_running = True
+             self.cleanup_stats = CleanupStats(status="starting")
+
+         try:
+             async for stats in run_cleanup():
+                 async with self:
+                     self.cleanup_stats = CleanupStats(
+                         status=stats.get("status", "checking"),
+                         vector_stores_checked=stats.get("vector_stores_checked", 0),
+                         vector_stores_expired=stats.get("vector_stores_expired", 0),
+                         vector_stores_deleted=stats.get("vector_stores_deleted", 0),
+                         threads_updated=stats.get("threads_updated", 0),
+                         current_vector_store=stats.get("current_vector_store"),
+                         total_vector_stores=stats.get("total_vector_stores", 0),
+                         error=stats.get("error"),
+                     )
+
+             async with self:
+                 self.cleanup_running = False
+                 if self.cleanup_stats.status == "completed":
+                     yield rx.toast.success(
+                         INFO_CLEANUP_COMPLETED,
+                         position="top-right",
+                     )
+                     # Reload vector stores to reflect changes
+                     yield FileManagerState.load_vector_stores
+
+         except Exception as e:
+             logger.error("Cleanup failed: %s", e)
+             async with self:
+                 self.cleanup_running = False
+                 self.cleanup_stats = CleanupStats(
+                     status="error",
+                     error=str(e),
+                 )
+                 yield rx.toast.error(
+                     ERROR_CLEANUP_FAILED,
+                     position="top-right",
+                 )
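
For reference, a minimal usage sketch of the two module-level helpers added in appkit_assistant/state/file_manager_state.py; the byte count and timestamp below are illustrative sample values, not data taken from the package:

from appkit_assistant.state.file_manager_state import (
    _format_unix_timestamp,
    format_file_size_for_display,
)

# 2.5 MiB in bytes falls into the MB bucket, so the helper returns (2.5, " MB").
value, suffix = format_file_size_for_display(2_621_440)
print(f"{value:.1f}{suffix}")  # prints "2.5 MB"

# Unix timestamps are converted from UTC seconds to local time as DD.MM.YYYY HH:MM;
# None or invalid input falls back to "-".
print(_format_unix_timestamp(1_700_000_000))  # e.g. "14.11.2023 23:13", depending on local timezone
print(_format_unix_timestamp(None))  # prints "-"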