signalpilot-ai-internal 0.3.2__py3-none-any.whl → 0.3.4__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (46)
  1. signalpilot_ai_internal/_version.py +1 -1
  2. signalpilot_ai_internal/cache_handlers.py +383 -0
  3. signalpilot_ai_internal/cache_service.py +549 -0
  4. signalpilot_ai_internal/handlers.py +35 -915
  5. signalpilot_ai_internal/snowflake_schema_service.py +671 -0
  6. signalpilot_ai_internal/unified_database_schema_service.py +742 -0
  7. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +3 -2
  8. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +2 -1
  9. signalpilot_ai_internal-0.3.4.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +1 -0
  10. signalpilot_ai_internal-0.3.4.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.d8bc4aeaf8ddeacb2486.js +1 -0
  11. signalpilot_ai_internal-0.3.4.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.694baa59818fdf19fba9.js +1 -0
  12. signalpilot_ai_internal-0.3.4.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.a9d6eb0edda396db6779.js +1 -0
  13. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +6 -0
  14. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.4.dist-info}/METADATA +1 -1
  15. signalpilot_ai_internal-0.3.4.dist-info/RECORD +45 -0
  16. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8d3d5d0480ba7396f2f5.js +0 -1
  17. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.5a362da0c4b891e005b3.js +0 -1
  18. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.57019ad0ad044a0f8ad8.js +0 -1
  19. signalpilot_ai_internal-0.3.2.dist-info/RECORD +0 -40
  20. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  21. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  22. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
  23. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js +0 -0
  24. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js.LICENSE.txt +0 -0
  25. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
  26. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
  27. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
  28. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js +0 -0
  29. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js.LICENSE.txt +0 -0
  30. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -0
  31. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
  32. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
  33. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -0
  34. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
  35. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
  36. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
  37. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
  38. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/606.90aaaae46b73dc3c08fb.js +0 -0
  39. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
  40. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
  41. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
  42. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -0
  43. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
  44. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.4.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  45. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.4.dist-info}/WHEEL +0 -0
  46. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.4.dist-info}/licenses/LICENSE +0 -0
signalpilot_ai_internal/cache_service.py (new file)
@@ -0,0 +1,549 @@
+ """
+ Persistent caching service for SignalPilot AI.
+ Handles OS-specific cache directory management and robust file operations.
+ """
+
+ import json
+ import os
+ import platform
+ import shutil
+ import tempfile
+ import threading
+ import time
+ import uuid
+ from pathlib import Path
+ from typing import Any, Dict, Optional
+
+
+ class CacheDirectoryManager:
+     """OS-specific cache directory management with fallbacks"""
+
+     @staticmethod
+     def get_cache_directories() -> list[Path]:
+         """Get ordered list of cache directories from most to least preferred"""
+         system = platform.system().lower()
+         directories = []
+
+         try:
+             if system == "windows":
+                 # Primary: AppData\Local
+                 appdata_local = os.environ.get('LOCALAPPDATA')
+                 if appdata_local:
+                     directories.append(Path(appdata_local) / "SignalPilotAI" / "Cache")
+
+                 # Secondary: AppData\Roaming
+                 appdata_roaming = os.environ.get('APPDATA')
+                 if appdata_roaming:
+                     directories.append(Path(appdata_roaming) / "SignalPilotAI" / "Cache")
+
+                 # Tertiary: User profile
+                 userprofile = os.environ.get('USERPROFILE')
+                 if userprofile:
+                     directories.append(Path(userprofile) / ".signalpilot-cache")
+
+             elif system == "darwin":  # macOS
+                 # Primary: ~/Library/Caches
+                 home = Path.home()
+                 directories.append(home / "Library" / "Caches" / "SignalPilotAI")
+
+                 # Secondary: ~/Library/Application Support
+                 directories.append(home / "Library" / "Application Support" / "SignalPilotAI")
+
+                 # Tertiary: ~/.signalpilot-cache
+                 directories.append(home / ".signalpilot-cache")
+
+             else:  # Linux and other Unix-like
+                 # Primary: XDG_CACHE_HOME or ~/.cache
+                 cache_home = os.environ.get('XDG_CACHE_HOME')
+                 if cache_home:
+                     directories.append(Path(cache_home) / "signalpilot-ai-internal")
+                 else:
+                     directories.append(Path.home() / ".cache" / "signalpilot-ai-internal")
+
+                 # Secondary: XDG_DATA_HOME or ~/.local/share
+                 data_home = os.environ.get('XDG_DATA_HOME')
+                 if data_home:
+                     directories.append(Path(data_home) / "signalpilot-ai-internal")
+                 else:
+                     directories.append(Path.home() / ".local" / "share" / "signalpilot-ai-internal")
+
+                 # Tertiary: ~/.signalpilot-cache
+                 directories.append(Path.home() / ".signalpilot-cache")
+
+             # Final fallback: temp directory
+             directories.append(Path(tempfile.gettempdir()) / f"signalpilot-ai-internal-{os.getuid() if hasattr(os, 'getuid') else 'user'}")
+
+         except Exception as e:
+             print(f"Error determining cache directories: {e}")
+             # Emergency fallback
+             directories.append(Path(tempfile.gettempdir()) / "signalpilot-ai-internal-emergency")
+
+         return directories
+
+     @staticmethod
+     def find_usable_cache_directory() -> Optional[Path]:
+         """Find the first usable cache directory with write permissions"""
+         for cache_dir in CacheDirectoryManager.get_cache_directories():
+             try:
+                 # Create directory if it doesn't exist
+                 cache_dir.mkdir(parents=True, exist_ok=True)
+
+                 # Test write permissions
+                 test_file = cache_dir / f"test_write_{uuid.uuid4().hex[:8]}.tmp"
+                 test_file.write_text("test")
+                 test_file.unlink()
+
+                 print(f"Using cache directory: {cache_dir}")
+                 return cache_dir
+
+             except Exception as e:
+                 print(f"Cannot use cache directory {cache_dir}: {e}")
+                 continue
+
+         print("ERROR: No usable cache directory found!")
+         return None
+
+
+ class RobustFileOperations:
+     """Extremely safe file operations with atomic writes and recovery"""
+
+     @staticmethod
+     def safe_write_json(file_path: Path, data: Any, max_retries: int = 3) -> bool:
+         """Safely write JSON data with atomic operations and backups"""
+         # print(f"Attempting to write JSON to: {file_path}")
+
+         if not file_path.parent.exists():
+             try:
+                 print(f"Creating parent directory: {file_path.parent}")
+                 file_path.parent.mkdir(parents=True, exist_ok=True)
+             except Exception as e:
+                 print(f"Failed to create directory {file_path.parent}: {e}")
+                 return False
+
+         # Create backup if file exists and is valid, but only if last backup is older than 1 hour
+         backup_path = None
+         if file_path.exists():
+             try:
+                 # Verify current file is valid JSON before backing up
+                 with open(file_path, 'r', encoding='utf-8') as f:
+                     json.load(f)
+
+                 # Check if we need a new backup (only if last backup is > 1 hour old)
+                 should_create_backup = RobustFileOperations._should_create_backup(file_path)
+
+                 if should_create_backup:
+                     backup_path = file_path.with_suffix(f".backup.{int(time.time())}")
+                     shutil.copy2(file_path, backup_path)
+                     print(f"Created backup: {backup_path}")
+
+                     # Keep only the most recent backup that's at least 1 hour old
+                     RobustFileOperations._cleanup_backups(file_path)
+
+             except Exception as e:
+                 print(f"Warning: Could not create backup for {file_path}: {e}")
+
+         # Attempt atomic write with retries
+         for attempt in range(max_retries):
+             temp_path = file_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
+
+             try:
+                 # Write to temporary file first
+                 with open(temp_path, 'w', encoding='utf-8') as f:
+                     json.dump(data, f, indent=2, ensure_ascii=False)
+
+                 # Verify the written data
+                 with open(temp_path, 'r', encoding='utf-8') as f:
+                     verification_data = json.load(f)
+
+                 # Atomic move to final location
+                 if platform.system().lower() == "windows":
+                     # Windows requires removing target first
+                     if file_path.exists():
+                         file_path.unlink()
+
+                 shutil.move(str(temp_path), str(file_path))
+
+                 return True
+
+             except Exception as e:
+                 print(f"Write attempt {attempt + 1} failed for {file_path}: {e}")
+
+                 # Clean up temp file
+                 try:
+                     if temp_path.exists():
+                         temp_path.unlink()
+                 except:
+                     pass
+
+                 if attempt == max_retries - 1:
+                     # Restore from backup if all attempts failed
+                     if backup_path and backup_path.exists():
+                         try:
+                             shutil.copy2(backup_path, file_path)
+                             print(f"Restored {file_path} from backup")
+                         except Exception as restore_error:
+                             print(f"Failed to restore backup: {restore_error}")
+
+                     return False
+
+                 # Wait before retry
+                 time.sleep(0.1 * (attempt + 1))
+
+         return False
+
+     @staticmethod
+     def safe_read_json(file_path: Path, default: Any = None) -> Any:
+         """Safely read JSON data with corruption recovery"""
+         if not file_path.exists():
+             return default
+
+         # Try reading main file
+         try:
+             with open(file_path, 'r', encoding='utf-8') as f:
+                 return json.load(f)
+         except Exception as e:
+             print(f"Failed to read {file_path}: {e}")
+
+         # Try to recover from backup
+         backup_files = sorted(
+             file_path.parent.glob(f"{file_path.stem}.backup.*"),
+             key=lambda x: x.stat().st_mtime,
+             reverse=True
+         )
+
+         for backup_path in backup_files:
+             try:
+                 with open(backup_path, 'r', encoding='utf-8') as f:
+                     data = json.load(f)
+
+                 print(f"Recovered data from backup: {backup_path}")
+
+                 # Try to restore the main file
+                 try:
+                     shutil.copy2(backup_path, file_path)
+                     print(f"Restored {file_path} from {backup_path}")
+                 except Exception as restore_error:
+                     print(f"Could not restore main file: {restore_error}")
+
+                 return data
+
+             except Exception as backup_error:
+                 print(f"Backup {backup_path} also corrupted: {backup_error}")
+                 continue
+
+         print(f"All recovery attempts failed for {file_path}, using default")
+         return default
+
+     @staticmethod
+     def _should_create_backup(file_path: Path) -> bool:
+         """Check if we should create a new backup (only if last backup is > 1 hour old)"""
+         try:
+             backup_files = sorted(
+                 file_path.parent.glob(f"{file_path.stem}.backup.*"),
+                 key=lambda x: x.stat().st_mtime,
+                 reverse=True
+             )
+
+             if not backup_files:
+                 return True  # No backups exist, create first one
+
+             # Check if the most recent backup is older than 1 hour
+             most_recent_backup = backup_files[0]
+             backup_age = time.time() - most_recent_backup.stat().st_mtime
+             return backup_age > 3600  # 3600 seconds = 1 hour
+
+         except Exception as e:
+             print(f"Error checking backup age: {e}")
+             return True  # If we can't check, err on the side of creating a backup
+
+     @staticmethod
+     def _cleanup_backups(file_path: Path, keep_count: int = 1):
+         """Keep only the most recent backup file (limit to 1 backup)"""
+         try:
+             backup_files = sorted(
+                 file_path.parent.glob(f"{file_path.stem}.backup.*"),
+                 key=lambda x: x.stat().st_mtime,
+                 reverse=True
+             )
+
+             # Keep only the most recent backup, delete all others
+             for old_backup in backup_files[keep_count:]:
+                 try:
+                     old_backup.unlink()
+                     print(f"Cleaned up old backup: {old_backup}")
+                 except Exception as cleanup_error:
+                     print(f"Failed to cleanup backup {old_backup}: {cleanup_error}")
+
+         except Exception as e:
+             print(f"Error cleaning up backups: {e}")
+
+
+ class PersistentCacheService:
+     """Extremely robust persistent caching service for SignalPilot AI"""
+
+     def __init__(self):
+         self.cache_dir = CacheDirectoryManager.find_usable_cache_directory()
+         self.chat_histories_file = None
+         self.app_values_file = None
+         self._lock = threading.RLock()
+
+         if self.cache_dir:
+             print(f"Cache service initialized with directory: {self.cache_dir}")
+             self.chat_histories_file = self.cache_dir / "chat_histories.json"
+             self.app_values_file = self.cache_dir / "app_values.json"
+
+             print(f"Chat histories file: {self.chat_histories_file}")
+             print(f"App values file: {self.app_values_file}")
+
+             # Initialize files if they don't exist
+             try:
+                 self._initialize_cache_files()
+                 print("Cache files initialized successfully")
+             except Exception as e:
+                 print(f"ERROR: Failed to initialize cache files: {e}")
+                 import traceback
+                 traceback.print_exc()
+         else:
+             print("WARNING: Cache service running without persistent storage!")
+
+     def _initialize_cache_files(self):
+         """Initialize cache files with empty structures if they don't exist"""
+         try:
+             if not self.chat_histories_file.exists():
+                 print(f"Creating new chat histories file: {self.chat_histories_file}")
+                 success = RobustFileOperations.safe_write_json(self.chat_histories_file, {})
+                 if not success:
+                     print(f"ERROR: Failed to create chat histories file: {self.chat_histories_file}")
+                 else:
+                     print(f"Successfully created chat histories file")
+             else:
+                 print(f"Chat histories file already exists: {self.chat_histories_file}")
+
+             if not self.app_values_file.exists():
+                 print(f"Creating new app values file: {self.app_values_file}")
+                 success = RobustFileOperations.safe_write_json(self.app_values_file, {})
+                 if not success:
+                     print(f"ERROR: Failed to create app values file: {self.app_values_file}")
+                 else:
+                     print(f"Successfully created app values file")
+             else:
+                 print(f"App values file already exists: {self.app_values_file}")
+
+         except Exception as e:
+             print(f"ERROR: Exception in _initialize_cache_files: {e}")
+             raise
+
+     def is_available(self) -> bool:
+         """Check if cache service is available"""
+         return self.cache_dir is not None and self.cache_dir.exists()
+
+     def _is_notebook_chat_history_key(self, chat_id: str) -> bool:
+         """Check if this is a notebook-specific chat history key"""
+         return chat_id.startswith('chat-history-notebook-')
+
+     def _get_notebook_chat_history_file(self, chat_id: str) -> Path:
+         """Get the file path for a notebook-specific chat history"""
+         if not self.cache_dir:
+             raise ValueError("Cache directory not available")
+
+         # Extract notebook ID from the chat_id
+         notebook_id = chat_id.replace('chat-history-notebook-', '')
+         filename = f"notebook_chat_{notebook_id}.json"
+         return self.cache_dir / filename
+
+     # Chat Histories Management
+     def get_chat_histories(self) -> Dict[str, Any]:
+         """Get all chat histories"""
+         with self._lock:
+             if not self.chat_histories_file:
+                 return {}
+             return RobustFileOperations.safe_read_json(self.chat_histories_file, {})
+
+     def get_chat_history(self, chat_id: str) -> Optional[Any]:
+         """Get specific chat history"""
+         # Handle notebook-specific chat histories
+         if self._is_notebook_chat_history_key(chat_id):
+             try:
+                 notebook_file = self._get_notebook_chat_history_file(chat_id)
+                 if notebook_file.exists():
+                     print(f"Loading notebook chat history from: {notebook_file}")
+                     return RobustFileOperations.safe_read_json(notebook_file, None)
+                 else:
+                     print(f"Notebook chat history file does not exist: {notebook_file}")
+                     return None
+             except Exception as e:
+                 print(f"ERROR: Failed to get notebook chat history for {chat_id}: {e}")
+                 return None
+
+         # Handle regular chat histories
+         histories = self.get_chat_histories()
+         return histories.get(chat_id)
+
+     def set_chat_history(self, chat_id: str, history: Any) -> bool:
+         """Set specific chat history"""
+         with self._lock:
+             # Handle notebook-specific chat histories
+             if self._is_notebook_chat_history_key(chat_id):
+                 try:
+                     notebook_file = self._get_notebook_chat_history_file(chat_id)
+                     print(f"Saving notebook chat history to: {notebook_file}")
+                     success = RobustFileOperations.safe_write_json(notebook_file, history)
+                     if success:
+                         print(f"Successfully saved notebook chat history for {chat_id}")
+                     else:
+                         print(f"ERROR: Failed to write notebook chat history for {chat_id}")
+                     return success
+                 except Exception as e:
+                     print(f"ERROR: Exception while saving notebook chat history for {chat_id}: {e}")
+                     import traceback
+                     traceback.print_exc()
+                     return False
+
+             # Handle regular chat histories
+             if not self.chat_histories_file:
+                 print(f"ERROR: Cannot save chat history for {chat_id} - no chat histories file configured")
+                 return False
+
+             try:
+                 print(f"Attempting to save chat history for chat_id: {chat_id}")
+                 histories = self.get_chat_histories()
+                 print(f"Current histories count: {len(histories)}")
+
+                 histories[chat_id] = history
+                 print(f"Updated histories count: {len(histories)}")
+
+                 success = RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
+                 if success:
+                     print(f"Successfully saved chat history for {chat_id}")
+                 else:
+                     print(f"ERROR: Failed to write chat history file for {chat_id}")
+
+                 return success
+
+             except Exception as e:
+                 print(f"ERROR: Exception while saving chat history for {chat_id}: {e}")
+                 import traceback
+                 traceback.print_exc()
+                 return False
+
+     def delete_chat_history(self, chat_id: str) -> bool:
+         """Delete specific chat history"""
+         with self._lock:
+             # Handle notebook-specific chat histories
+             if self._is_notebook_chat_history_key(chat_id):
+                 try:
+                     notebook_file = self._get_notebook_chat_history_file(chat_id)
+                     if notebook_file.exists():
+                         notebook_file.unlink()
+                         print(f"Deleted notebook chat history file: {notebook_file}")
+                     return True
+                 except Exception as e:
+                     print(f"ERROR: Failed to delete notebook chat history for {chat_id}: {e}")
+                     return False
+
+             # Handle regular chat histories
+             if not self.chat_histories_file:
+                 return False
+
+             histories = self.get_chat_histories()
+             if chat_id in histories:
+                 del histories[chat_id]
+                 return RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
+             return True
+
+     def clear_chat_histories(self) -> bool:
+         """Clear all chat histories"""
+         with self._lock:
+             if not self.chat_histories_file:
+                 return False
+             return RobustFileOperations.safe_write_json(self.chat_histories_file, {})
+
+     # App Values Management
+     def get_app_values(self) -> Dict[str, Any]:
+         """Get all app values"""
+         with self._lock:
+             if not self.app_values_file:
+                 return {}
+             return RobustFileOperations.safe_read_json(self.app_values_file, {})
+
+     def get_app_value(self, key: str, default: Any = None) -> Any:
+         """Get specific app value"""
+         values = self.get_app_values()
+         return values.get(key, default)
+
+     def set_app_value(self, key: str, value: Any) -> bool:
+         """Set specific app value"""
+         with self._lock:
+             if not self.app_values_file:
+                 return False
+
+             values = self.get_app_values()
+             values[key] = value
+             return RobustFileOperations.safe_write_json(self.app_values_file, values)
+
+     def delete_app_value(self, key: str) -> bool:
+         """Delete specific app value"""
+         with self._lock:
+             if not self.app_values_file:
+                 return False
+
+             values = self.get_app_values()
+             if key in values:
+                 del values[key]
+                 return RobustFileOperations.safe_write_json(self.app_values_file, values)
+             return True
+
+     def clear_app_values(self) -> bool:
+         """Clear all app values"""
+         with self._lock:
+             if not self.app_values_file:
+                 return False
+             return RobustFileOperations.safe_write_json(self.app_values_file, {})
+
+     def get_cache_info(self) -> Dict[str, Any]:
+         """Get cache service information"""
+         info = {
+             "available": self.is_available(),
+             "cache_directory": str(self.cache_dir) if self.cache_dir else None,
+             "platform": platform.system(),
+             "chat_histories_size": 0,
+             "app_values_size": 0,
+             "total_chat_histories": 0,
+             "total_app_values": 0,
+             "notebook_chat_files": 0,
+             "notebook_chat_files_size": 0
+         }
+
+         if self.is_available():
+             try:
+                 if self.chat_histories_file.exists():
+                     info["chat_histories_size"] = self.chat_histories_file.stat().st_size
+                     histories = self.get_chat_histories()
+                     info["total_chat_histories"] = len(histories)
+
+                 if self.app_values_file.exists():
+                     info["app_values_size"] = self.app_values_file.stat().st_size
+                     values = self.get_app_values()
+                     info["total_app_values"] = len(values)
+
+                 # Count notebook chat history files
+                 notebook_files = list(self.cache_dir.glob("notebook_chat_*.json"))
+                 info["notebook_chat_files"] = len(notebook_files)
+                 info["notebook_chat_files_size"] = sum(f.stat().st_size for f in notebook_files if f.exists())
+
+             except Exception as e:
+                 info["error"] = str(e)
+
+         return info
+
+
+ # Global cache service instance
+ _cache_service = None
+
+
+ def get_cache_service() -> PersistentCacheService:
+     """Get the global cache service instance"""
+     global _cache_service
+     if _cache_service is None:
+         _cache_service = PersistentCacheService()
+     return _cache_service
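
For orientation, here is a minimal usage sketch of the cache API this release adds (the get_cache_service() factory and the chat-history/app-value accessors defined in the hunk above). It assumes the 0.3.4 wheel is installed; the key names and payloads are illustrative assumptions, not values taken from the package:

    from signalpilot_ai_internal.cache_service import get_cache_service

    # Module-level factory: lazily constructs one PersistentCacheService per
    # process and hands back the same instance on every call.
    cache = get_cache_service()

    if cache.is_available():
        # App values are stored together in app_values.json in the cache directory.
        cache.set_app_value("theme", "dark")                  # illustrative key/value
        print(cache.get_app_value("theme", default="light"))  # -> "dark"

        # Keys starting with 'chat-history-notebook-' are routed to a per-notebook
        # notebook_chat_<id>.json file; all other keys share chat_histories.json.
        cache.set_chat_history("chat-history-notebook-abc123",   # hypothetical notebook id
                               [{"role": "user", "content": "hello"}])
        print(cache.get_chat_history("chat-history-notebook-abc123"))

        # Cache directory, platform, file sizes, and entry counts in one dict.
        print(cache.get_cache_info())
    else:
        print("No writable cache directory found; caching is disabled")

Design-wise, every write funnels through RobustFileOperations.safe_write_json: the JSON is written to a uniquely named temp file, re-read to verify it parses, then moved over the target, with a .backup.<timestamp> copy rotated at most once per hour for recovery, so an interrupted write should not leave a corrupt cache file behind.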