signalpilot-ai-internal 0.3.1__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the registry.

Potentially problematic release: this version of signalpilot-ai-internal might be problematic.

Files changed (46)
  1. signalpilot_ai_internal/_version.py +1 -1
  2. signalpilot_ai_internal/cache_handlers.py +383 -0
  3. signalpilot_ai_internal/cache_service.py +552 -0
  4. signalpilot_ai_internal/handlers.py +35 -915
  5. signalpilot_ai_internal/snowflake_schema_service.py +639 -0
  6. signalpilot_ai_internal/unified_database_schema_service.py +742 -0
  7. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +3 -2
  8. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +2 -1
  9. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +1 -0
  10. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.0fea0d444fc7ba458d5a.js +1 -0
  11. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.c61f5bc4d0da4a0781d6.js +1 -0
  12. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.2e2c6ae0baa591126b0a.js +1 -0
  13. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +6 -0
  14. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/METADATA +1 -1
  15. signalpilot_ai_internal-0.3.3.dist-info/RECORD +45 -0
  16. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.45c187b4dc615d9cc073.js +0 -1
  17. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.e2bd05ad6dbdb957683f.js +0 -1
  18. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.07bc9bbdead29df455e7.js +0 -1
  19. signalpilot_ai_internal-0.3.1.dist-info/RECORD +0 -40
  20. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  21. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  22. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
  23. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js +0 -0
  24. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js.LICENSE.txt +0 -0
  25. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
  26. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
  27. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
  28. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js +0 -0
  29. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js.LICENSE.txt +0 -0
  30. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -0
  31. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
  32. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
  33. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -0
  34. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
  35. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
  36. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
  37. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
  38. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/606.90aaaae46b73dc3c08fb.js +0 -0
  39. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
  40. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
  41. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
  42. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -0
  43. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
  44. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  45. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/WHEEL +0 -0
  46. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/licenses/LICENSE +0 -0
signalpilot_ai_internal/cache_service.py (new file)
@@ -0,0 +1,552 @@
+"""
+Persistent caching service for SignalPilot AI.
+Handles OS-specific cache directory management and robust file operations.
+"""
+
+import json
+import os
+import platform
+import shutil
+import tempfile
+import threading
+import time
+import uuid
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+
+class CacheDirectoryManager:
+    """OS-specific cache directory management with fallbacks"""
+
+    @staticmethod
+    def get_cache_directories() -> list[Path]:
+        """Get ordered list of cache directories from most to least preferred"""
+        system = platform.system().lower()
+        directories = []
+
+        try:
+            if system == "windows":
+                # Primary: AppData\Local
+                appdata_local = os.environ.get('LOCALAPPDATA')
+                if appdata_local:
+                    directories.append(Path(appdata_local) / "SignalPilotAI" / "Cache")
+
+                # Secondary: AppData\Roaming
+                appdata_roaming = os.environ.get('APPDATA')
+                if appdata_roaming:
+                    directories.append(Path(appdata_roaming) / "SignalPilotAI" / "Cache")
+
+                # Tertiary: User profile
+                userprofile = os.environ.get('USERPROFILE')
+                if userprofile:
+                    directories.append(Path(userprofile) / ".signalpilot-cache")
+
+            elif system == "darwin":  # macOS
+                # Primary: ~/Library/Caches
+                home = Path.home()
+                directories.append(home / "Library" / "Caches" / "SignalPilotAI")
+
+                # Secondary: ~/Library/Application Support
+                directories.append(home / "Library" / "Application Support" / "SignalPilotAI")
+
+                # Tertiary: ~/.signalpilot-cache
+                directories.append(home / ".signalpilot-cache")
+
+            else:  # Linux and other Unix-like
+                # Primary: XDG_CACHE_HOME or ~/.cache
+                cache_home = os.environ.get('XDG_CACHE_HOME')
+                if cache_home:
+                    directories.append(Path(cache_home) / "signalpilot-ai-internal")
+                else:
+                    directories.append(Path.home() / ".cache" / "signalpilot-ai-internal")
+
+                # Secondary: XDG_DATA_HOME or ~/.local/share
+                data_home = os.environ.get('XDG_DATA_HOME')
+                if data_home:
+                    directories.append(Path(data_home) / "signalpilot-ai-internal")
+                else:
+                    directories.append(Path.home() / ".local" / "share" / "signalpilot-ai-internal")
+
+                # Tertiary: ~/.signalpilot-cache
+                directories.append(Path.home() / ".signalpilot-cache")
+
+            # Final fallback: temp directory
+            directories.append(Path(tempfile.gettempdir()) / f"signalpilot-ai-internal-{os.getuid() if hasattr(os, 'getuid') else 'user'}")
+
+        except Exception as e:
+            print(f"Error determining cache directories: {e}")
+            # Emergency fallback
+            directories.append(Path(tempfile.gettempdir()) / "signalpilot-ai-internal-emergency")
+
+        return directories
+
+    @staticmethod
+    def find_usable_cache_directory() -> Optional[Path]:
+        """Find the first usable cache directory with write permissions"""
+        for cache_dir in CacheDirectoryManager.get_cache_directories():
+            try:
+                # Create directory if it doesn't exist
+                cache_dir.mkdir(parents=True, exist_ok=True)
+
+                # Test write permissions
+                test_file = cache_dir / f"test_write_{uuid.uuid4().hex[:8]}.tmp"
+                test_file.write_text("test")
+                test_file.unlink()
+
+                print(f"Using cache directory: {cache_dir}")
+                return cache_dir
+
+            except Exception as e:
+                print(f"Cannot use cache directory {cache_dir}: {e}")
+                continue
+
+        print("ERROR: No usable cache directory found!")
+        return None
+
+
+class RobustFileOperations:
+    """Extremely safe file operations with atomic writes and recovery"""
+
+    @staticmethod
+    def safe_write_json(file_path: Path, data: Any, max_retries: int = 3) -> bool:
+        """Safely write JSON data with atomic operations and backups"""
+        print(f"Attempting to write JSON to: {file_path}")
+
+        if not file_path.parent.exists():
+            try:
+                print(f"Creating parent directory: {file_path.parent}")
+                file_path.parent.mkdir(parents=True, exist_ok=True)
+            except Exception as e:
+                print(f"Failed to create directory {file_path.parent}: {e}")
+                return False
+
+        # Create backup if file exists and is valid, but only if last backup is older than 1 hour
+        backup_path = None
+        if file_path.exists():
+            try:
+                # Verify current file is valid JSON before backing up
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    json.load(f)
+
+                # Check if we need a new backup (only if last backup is > 1 hour old)
+                should_create_backup = RobustFileOperations._should_create_backup(file_path)
+
+                if should_create_backup:
+                    backup_path = file_path.with_suffix(f".backup.{int(time.time())}")
+                    shutil.copy2(file_path, backup_path)
+                    print(f"Created backup: {backup_path}")
+
+                    # Keep only the most recent backup that's at least 1 hour old
+                    RobustFileOperations._cleanup_backups(file_path)
+                else:
+                    print(f"Skipping backup for {file_path} - recent backup exists")
+
+            except Exception as e:
+                print(f"Warning: Could not create backup for {file_path}: {e}")
+
+        # Attempt atomic write with retries
+        for attempt in range(max_retries):
+            temp_path = file_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
+
+            try:
+                # Write to temporary file first
+                with open(temp_path, 'w', encoding='utf-8') as f:
+                    json.dump(data, f, indent=2, ensure_ascii=False)
+
+                # Verify the written data
+                with open(temp_path, 'r', encoding='utf-8') as f:
+                    verification_data = json.load(f)
+
+                # Atomic move to final location
+                if platform.system().lower() == "windows":
+                    # Windows requires removing target first
+                    if file_path.exists():
+                        file_path.unlink()
+
+                shutil.move(str(temp_path), str(file_path))
+
+                print(f"Successfully wrote {file_path}")
+                return True
+
+            except Exception as e:
+                print(f"Write attempt {attempt + 1} failed for {file_path}: {e}")
+
+                # Clean up temp file
+                try:
+                    if temp_path.exists():
+                        temp_path.unlink()
+                except:
+                    pass
+
+                if attempt == max_retries - 1:
+                    # Restore from backup if all attempts failed
+                    if backup_path and backup_path.exists():
+                        try:
+                            shutil.copy2(backup_path, file_path)
+                            print(f"Restored {file_path} from backup")
+                        except Exception as restore_error:
+                            print(f"Failed to restore backup: {restore_error}")
+
+                    return False
+
+                # Wait before retry
+                time.sleep(0.1 * (attempt + 1))
+
+        return False
+
+    @staticmethod
+    def safe_read_json(file_path: Path, default: Any = None) -> Any:
+        """Safely read JSON data with corruption recovery"""
+        if not file_path.exists():
+            return default
+
+        # Try reading main file
+        try:
+            with open(file_path, 'r', encoding='utf-8') as f:
+                return json.load(f)
+        except Exception as e:
+            print(f"Failed to read {file_path}: {e}")
+
+        # Try to recover from backup
+        backup_files = sorted(
+            file_path.parent.glob(f"{file_path.stem}.backup.*"),
+            key=lambda x: x.stat().st_mtime,
+            reverse=True
+        )
+
+        for backup_path in backup_files:
+            try:
+                with open(backup_path, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+
+                print(f"Recovered data from backup: {backup_path}")
+
+                # Try to restore the main file
+                try:
+                    shutil.copy2(backup_path, file_path)
+                    print(f"Restored {file_path} from {backup_path}")
+                except Exception as restore_error:
+                    print(f"Could not restore main file: {restore_error}")
+
+                return data
+
+            except Exception as backup_error:
+                print(f"Backup {backup_path} also corrupted: {backup_error}")
+                continue
+
+        print(f"All recovery attempts failed for {file_path}, using default")
+        return default
+
+    @staticmethod
+    def _should_create_backup(file_path: Path) -> bool:
+        """Check if we should create a new backup (only if last backup is > 1 hour old)"""
+        try:
+            backup_files = sorted(
+                file_path.parent.glob(f"{file_path.stem}.backup.*"),
+                key=lambda x: x.stat().st_mtime,
+                reverse=True
+            )
+
+            if not backup_files:
+                return True  # No backups exist, create first one
+
+            # Check if the most recent backup is older than 1 hour
+            most_recent_backup = backup_files[0]
+            backup_age = time.time() - most_recent_backup.stat().st_mtime
+            return backup_age > 3600  # 3600 seconds = 1 hour
+
+        except Exception as e:
+            print(f"Error checking backup age: {e}")
+            return True  # If we can't check, err on the side of creating a backup
+
+    @staticmethod
+    def _cleanup_backups(file_path: Path, keep_count: int = 1):
+        """Keep only the most recent backup file (limit to 1 backup)"""
+        try:
+            backup_files = sorted(
+                file_path.parent.glob(f"{file_path.stem}.backup.*"),
+                key=lambda x: x.stat().st_mtime,
+                reverse=True
+            )
+
+            # Keep only the most recent backup, delete all others
+            for old_backup in backup_files[keep_count:]:
+                try:
+                    old_backup.unlink()
+                    print(f"Cleaned up old backup: {old_backup}")
+                except Exception as cleanup_error:
+                    print(f"Failed to cleanup backup {old_backup}: {cleanup_error}")
+
+        except Exception as e:
+            print(f"Error cleaning up backups: {e}")
+
+
+class PersistentCacheService:
+    """Extremely robust persistent caching service for SignalPilot AI"""
+
+    def __init__(self):
+        self.cache_dir = CacheDirectoryManager.find_usable_cache_directory()
+        self.chat_histories_file = None
+        self.app_values_file = None
+        self._lock = threading.RLock()
+
+        if self.cache_dir:
+            print(f"Cache service initialized with directory: {self.cache_dir}")
+            self.chat_histories_file = self.cache_dir / "chat_histories.json"
+            self.app_values_file = self.cache_dir / "app_values.json"
+
+            print(f"Chat histories file: {self.chat_histories_file}")
+            print(f"App values file: {self.app_values_file}")
+
+            # Initialize files if they don't exist
+            try:
+                self._initialize_cache_files()
+                print("Cache files initialized successfully")
+            except Exception as e:
+                print(f"ERROR: Failed to initialize cache files: {e}")
+                import traceback
+                traceback.print_exc()
+        else:
+            print("WARNING: Cache service running without persistent storage!")
+
+    def _initialize_cache_files(self):
+        """Initialize cache files with empty structures if they don't exist"""
+        try:
+            if not self.chat_histories_file.exists():
+                print(f"Creating new chat histories file: {self.chat_histories_file}")
+                success = RobustFileOperations.safe_write_json(self.chat_histories_file, {})
+                if not success:
+                    print(f"ERROR: Failed to create chat histories file: {self.chat_histories_file}")
+                else:
+                    print(f"Successfully created chat histories file")
+            else:
+                print(f"Chat histories file already exists: {self.chat_histories_file}")
+
+            if not self.app_values_file.exists():
+                print(f"Creating new app values file: {self.app_values_file}")
+                success = RobustFileOperations.safe_write_json(self.app_values_file, {})
+                if not success:
+                    print(f"ERROR: Failed to create app values file: {self.app_values_file}")
+                else:
+                    print(f"Successfully created app values file")
+            else:
+                print(f"App values file already exists: {self.app_values_file}")
+
+        except Exception as e:
+            print(f"ERROR: Exception in _initialize_cache_files: {e}")
+            raise
+
+    def is_available(self) -> bool:
+        """Check if cache service is available"""
+        return self.cache_dir is not None and self.cache_dir.exists()
+
+    def _is_notebook_chat_history_key(self, chat_id: str) -> bool:
+        """Check if this is a notebook-specific chat history key"""
+        return chat_id.startswith('chat-history-notebook-')
+
+    def _get_notebook_chat_history_file(self, chat_id: str) -> Path:
+        """Get the file path for a notebook-specific chat history"""
+        if not self.cache_dir:
+            raise ValueError("Cache directory not available")
+
+        # Extract notebook ID from the chat_id
+        notebook_id = chat_id.replace('chat-history-notebook-', '')
+        filename = f"notebook_chat_{notebook_id}.json"
+        return self.cache_dir / filename
+
+    # Chat Histories Management
+    def get_chat_histories(self) -> Dict[str, Any]:
+        """Get all chat histories"""
+        with self._lock:
+            if not self.chat_histories_file:
+                return {}
+            return RobustFileOperations.safe_read_json(self.chat_histories_file, {})
+
+    def get_chat_history(self, chat_id: str) -> Optional[Any]:
+        """Get specific chat history"""
+        # Handle notebook-specific chat histories
+        if self._is_notebook_chat_history_key(chat_id):
+            try:
+                notebook_file = self._get_notebook_chat_history_file(chat_id)
+                if notebook_file.exists():
+                    print(f"Loading notebook chat history from: {notebook_file}")
+                    return RobustFileOperations.safe_read_json(notebook_file, None)
+                else:
+                    print(f"Notebook chat history file does not exist: {notebook_file}")
+                    return None
+            except Exception as e:
+                print(f"ERROR: Failed to get notebook chat history for {chat_id}: {e}")
+                return None
+
+        # Handle regular chat histories
+        histories = self.get_chat_histories()
+        return histories.get(chat_id)
+
+    def set_chat_history(self, chat_id: str, history: Any) -> bool:
+        """Set specific chat history"""
+        with self._lock:
+            # Handle notebook-specific chat histories
+            if self._is_notebook_chat_history_key(chat_id):
+                try:
+                    notebook_file = self._get_notebook_chat_history_file(chat_id)
+                    print(f"Saving notebook chat history to: {notebook_file}")
+                    success = RobustFileOperations.safe_write_json(notebook_file, history)
+                    if success:
+                        print(f"Successfully saved notebook chat history for {chat_id}")
+                    else:
+                        print(f"ERROR: Failed to write notebook chat history for {chat_id}")
+                    return success
+                except Exception as e:
+                    print(f"ERROR: Exception while saving notebook chat history for {chat_id}: {e}")
+                    import traceback
+                    traceback.print_exc()
+                    return False
+
+            # Handle regular chat histories
+            if not self.chat_histories_file:
+                print(f"ERROR: Cannot save chat history for {chat_id} - no chat histories file configured")
+                return False
+
+            try:
+                print(f"Attempting to save chat history for chat_id: {chat_id}")
+                histories = self.get_chat_histories()
+                print(f"Current histories count: {len(histories)}")
+
+                histories[chat_id] = history
+                print(f"Updated histories count: {len(histories)}")
+
+                success = RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
+                if success:
+                    print(f"Successfully saved chat history for {chat_id}")
+                else:
+                    print(f"ERROR: Failed to write chat history file for {chat_id}")
+
+                return success
+
+            except Exception as e:
+                print(f"ERROR: Exception while saving chat history for {chat_id}: {e}")
+                import traceback
+                traceback.print_exc()
+                return False
+
+    def delete_chat_history(self, chat_id: str) -> bool:
+        """Delete specific chat history"""
+        with self._lock:
+            # Handle notebook-specific chat histories
+            if self._is_notebook_chat_history_key(chat_id):
+                try:
+                    notebook_file = self._get_notebook_chat_history_file(chat_id)
+                    if notebook_file.exists():
+                        notebook_file.unlink()
+                        print(f"Deleted notebook chat history file: {notebook_file}")
+                    return True
+                except Exception as e:
+                    print(f"ERROR: Failed to delete notebook chat history for {chat_id}: {e}")
+                    return False
+
+            # Handle regular chat histories
+            if not self.chat_histories_file:
+                return False
+
+            histories = self.get_chat_histories()
+            if chat_id in histories:
+                del histories[chat_id]
+                return RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
+            return True
+
+    def clear_chat_histories(self) -> bool:
+        """Clear all chat histories"""
+        with self._lock:
+            if not self.chat_histories_file:
+                return False
+            return RobustFileOperations.safe_write_json(self.chat_histories_file, {})
+
+    # App Values Management
+    def get_app_values(self) -> Dict[str, Any]:
+        """Get all app values"""
+        with self._lock:
+            if not self.app_values_file:
+                return {}
+            return RobustFileOperations.safe_read_json(self.app_values_file, {})
+
+    def get_app_value(self, key: str, default: Any = None) -> Any:
+        """Get specific app value"""
+        values = self.get_app_values()
+        return values.get(key, default)
+
+    def set_app_value(self, key: str, value: Any) -> bool:
+        """Set specific app value"""
+        with self._lock:
+            if not self.app_values_file:
+                return False
+
+            values = self.get_app_values()
+            values[key] = value
+            return RobustFileOperations.safe_write_json(self.app_values_file, values)
+
+    def delete_app_value(self, key: str) -> bool:
+        """Delete specific app value"""
+        with self._lock:
+            if not self.app_values_file:
+                return False
+
+            values = self.get_app_values()
+            if key in values:
+                del values[key]
+                return RobustFileOperations.safe_write_json(self.app_values_file, values)
+            return True
+
+    def clear_app_values(self) -> bool:
+        """Clear all app values"""
+        with self._lock:
+            if not self.app_values_file:
+                return False
+            return RobustFileOperations.safe_write_json(self.app_values_file, {})
+
+    def get_cache_info(self) -> Dict[str, Any]:
+        """Get cache service information"""
+        info = {
+            "available": self.is_available(),
+            "cache_directory": str(self.cache_dir) if self.cache_dir else None,
+            "platform": platform.system(),
+            "chat_histories_size": 0,
+            "app_values_size": 0,
+            "total_chat_histories": 0,
+            "total_app_values": 0,
+            "notebook_chat_files": 0,
+            "notebook_chat_files_size": 0
+        }
+
+        if self.is_available():
+            try:
+                if self.chat_histories_file.exists():
+                    info["chat_histories_size"] = self.chat_histories_file.stat().st_size
+                    histories = self.get_chat_histories()
+                    info["total_chat_histories"] = len(histories)
+
+                if self.app_values_file.exists():
+                    info["app_values_size"] = self.app_values_file.stat().st_size
+                    values = self.get_app_values()
+                    info["total_app_values"] = len(values)
+
+                # Count notebook chat history files
+                notebook_files = list(self.cache_dir.glob("notebook_chat_*.json"))
+                info["notebook_chat_files"] = len(notebook_files)
+                info["notebook_chat_files_size"] = sum(f.stat().st_size for f in notebook_files if f.exists())
+
+            except Exception as e:
+                info["error"] = str(e)
+
+        return info
+
+
+# Global cache service instance
+_cache_service = None
+
+
+def get_cache_service() -> PersistentCacheService:
+    """Get the global cache service instance"""
+    global _cache_service
+    if _cache_service is None:
+        _cache_service = PersistentCacheService()
+    return _cache_service
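
For context, a minimal sketch of how the cache service added in this release could be exercised once the package is installed. The import path follows the file list above (signalpilot_ai_internal/cache_service.py); the keys, values, and notebook ID below are illustrative placeholders, not values taken from the diff.

from signalpilot_ai_internal.cache_service import get_cache_service

# Obtain the process-wide singleton; the first call resolves a cache
# directory and creates chat_histories.json / app_values.json.
cache = get_cache_service()

if cache.is_available():
    # Simple key/value storage ("last_theme" is a hypothetical key).
    cache.set_app_value("last_theme", "dark")
    print(cache.get_app_value("last_theme", default="light"))

    # Chat IDs with the "chat-history-notebook-" prefix are routed to a
    # per-notebook file (notebook_chat_<id>.json); "abc123" is a
    # hypothetical notebook ID.
    cache.set_chat_history("chat-history-notebook-abc123",
                           [{"role": "user", "content": "hello"}])
    print(cache.get_chat_history("chat-history-notebook-abc123"))

    # Inspect file sizes, entry counts, and the resolved cache directory.
    print(cache.get_cache_info())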