signalpilot-ai-internal 0.3.1__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff shows the content of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.

Potentially problematic release: this version of signalpilot-ai-internal has been flagged as potentially problematic.

Files changed (46)
  1. signalpilot_ai_internal/_version.py +1 -1
  2. signalpilot_ai_internal/cache_handlers.py +383 -0
  3. signalpilot_ai_internal/cache_service.py +552 -0
  4. signalpilot_ai_internal/handlers.py +35 -915
  5. signalpilot_ai_internal/snowflake_schema_service.py +639 -0
  6. signalpilot_ai_internal/unified_database_schema_service.py +742 -0
  7. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +3 -2
  8. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +2 -1
  9. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +1 -0
  10. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.0fea0d444fc7ba458d5a.js +1 -0
  11. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.c61f5bc4d0da4a0781d6.js +1 -0
  12. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.2e2c6ae0baa591126b0a.js +1 -0
  13. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +6 -0
  14. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/METADATA +1 -1
  15. signalpilot_ai_internal-0.3.3.dist-info/RECORD +45 -0
  16. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.45c187b4dc615d9cc073.js +0 -1
  17. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.e2bd05ad6dbdb957683f.js +0 -1
  18. signalpilot_ai_internal-0.3.1.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.07bc9bbdead29df455e7.js +0 -1
  19. signalpilot_ai_internal-0.3.1.dist-info/RECORD +0 -40
  20. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  21. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  22. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
  23. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js +0 -0
  24. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js.LICENSE.txt +0 -0
  25. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
  26. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
  27. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
  28. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js +0 -0
  29. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js.LICENSE.txt +0 -0
  30. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -0
  31. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
  32. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
  33. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -0
  34. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
  35. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
  36. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
  37. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
  38. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/606.90aaaae46b73dc3c08fb.js +0 -0
  39. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
  40. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
  41. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
  42. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -0
  43. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
  44. {signalpilot_ai_internal-0.3.1.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  45. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/WHEEL +0 -0
  46. {signalpilot_ai_internal-0.3.1.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/licenses/LICENSE +0 -0
@@ -1,553 +1,14 @@
  import json
- import os
- import platform
- import shutil
- import tempfile
- import threading
- import time
- import uuid
- from pathlib import Path
- from typing import Dict, Any, Optional, Union

  from jupyter_server.base.handlers import APIHandler
  from jupyter_server.utils import url_path_join
  import tornado

+ from .cache_service import get_cache_service
+ from .cache_handlers import ChatHistoriesHandler, AppValuesHandler, CacheInfoHandler
+ from .unified_database_schema_service import UnifiedDatabaseSchemaHandler, UnifiedDatabaseQueryHandler
+ from .snowflake_schema_service import SnowflakeSchemaHandler, SnowflakeQueryHandler

- class CacheDirectoryManager:
-     """OS-specific cache directory management with fallbacks"""
-
-     @staticmethod
-     def get_cache_directories() -> list[Path]:
-         """Get ordered list of cache directories from most to least preferred"""
-         system = platform.system().lower()
-         directories = []
-
-         try:
-             if system == "windows":
-                 # Primary: AppData\Local
-                 appdata_local = os.environ.get('LOCALAPPDATA')
-                 if appdata_local:
-                     directories.append(Path(appdata_local) / "SignalPilotAI" / "Cache")
-
-                 # Secondary: AppData\Roaming
-                 appdata_roaming = os.environ.get('APPDATA')
-                 if appdata_roaming:
-                     directories.append(Path(appdata_roaming) / "SignalPilotAI" / "Cache")
-
-                 # Tertiary: User profile
-                 userprofile = os.environ.get('USERPROFILE')
-                 if userprofile:
-                     directories.append(Path(userprofile) / ".signalpilot-cache")
-
-             elif system == "darwin":  # macOS
-                 # Primary: ~/Library/Caches
-                 home = Path.home()
-                 directories.append(home / "Library" / "Caches" / "SignalPilotAI")
-
-                 # Secondary: ~/Library/Application Support
-                 directories.append(home / "Library" / "Application Support" / "SignalPilotAI")
-
-                 # Tertiary: ~/.signalpilot-cache
-                 directories.append(home / ".signalpilot-cache")
-
-             else:  # Linux and other Unix-like
-                 # Primary: XDG_CACHE_HOME or ~/.cache
-                 cache_home = os.environ.get('XDG_CACHE_HOME')
-                 if cache_home:
-                     directories.append(Path(cache_home) / "signalpilot-ai-internal")
-                 else:
-                     directories.append(Path.home() / ".cache" / "signalpilot-ai-internal")
-
-                 # Secondary: XDG_DATA_HOME or ~/.local/share
-                 data_home = os.environ.get('XDG_DATA_HOME')
-                 if data_home:
-                     directories.append(Path(data_home) / "signalpilot-ai-internal")
-                 else:
-                     directories.append(Path.home() / ".local" / "share" / "signalpilot-ai-internal")
-
-                 # Tertiary: ~/.signalpilot-cache
-                 directories.append(Path.home() / ".signalpilot-cache")
-
-             # Final fallback: temp directory
-             directories.append(Path(tempfile.gettempdir()) / f"signalpilot-ai-internal-{os.getuid() if hasattr(os, 'getuid') else 'user'}")
-
-         except Exception as e:
-             print(f"Error determining cache directories: {e}")
-             # Emergency fallback
-             directories.append(Path(tempfile.gettempdir()) / "signalpilot-ai-internal-emergency")
-
-         return directories
-
-     @staticmethod
-     def find_usable_cache_directory() -> Optional[Path]:
-         """Find the first usable cache directory with write permissions"""
-         for cache_dir in CacheDirectoryManager.get_cache_directories():
-             try:
-                 # Create directory if it doesn't exist
-                 cache_dir.mkdir(parents=True, exist_ok=True)
-
-                 # Test write permissions
-                 test_file = cache_dir / f"test_write_{uuid.uuid4().hex[:8]}.tmp"
-                 test_file.write_text("test")
-                 test_file.unlink()
-
-                 print(f"Using cache directory: {cache_dir}")
-                 return cache_dir
-
-             except Exception as e:
-                 print(f"Cannot use cache directory {cache_dir}: {e}")
-                 continue
-
-         print("ERROR: No usable cache directory found!")
-         return None
-
-
- class RobustFileOperations:
-     """Extremely safe file operations with atomic writes and recovery"""
-
-     @staticmethod
-     def safe_write_json(file_path: Path, data: Any, max_retries: int = 3) -> bool:
-         """Safely write JSON data with atomic operations and backups"""
-         print(f"Attempting to write JSON to: {file_path}")
-
-         if not file_path.parent.exists():
-             try:
-                 print(f"Creating parent directory: {file_path.parent}")
-                 file_path.parent.mkdir(parents=True, exist_ok=True)
-             except Exception as e:
-                 print(f"Failed to create directory {file_path.parent}: {e}")
-                 return False
-
-         # Create backup if file exists and is valid, but only if last backup is older than 1 hour
-         backup_path = None
-         if file_path.exists():
-             try:
-                 # Verify current file is valid JSON before backing up
-                 with open(file_path, 'r', encoding='utf-8') as f:
-                     json.load(f)
-
-                 # Check if we need a new backup (only if last backup is > 1 hour old)
-                 should_create_backup = RobustFileOperations._should_create_backup(file_path)
-
-                 if should_create_backup:
-                     backup_path = file_path.with_suffix(f".backup.{int(time.time())}")
-                     shutil.copy2(file_path, backup_path)
-                     print(f"Created backup: {backup_path}")
-
-                     # Keep only the most recent backup that's at least 1 hour old
-                     RobustFileOperations._cleanup_backups(file_path)
-                 else:
-                     print(f"Skipping backup for {file_path} - recent backup exists")
-
-             except Exception as e:
-                 print(f"Warning: Could not create backup for {file_path}: {e}")
-
-         # Attempt atomic write with retries
-         for attempt in range(max_retries):
-             temp_path = file_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
-
-             try:
-                 # Write to temporary file first
-                 with open(temp_path, 'w', encoding='utf-8') as f:
-                     json.dump(data, f, indent=2, ensure_ascii=False)
-
-                 # Verify the written data
-                 with open(temp_path, 'r', encoding='utf-8') as f:
-                     verification_data = json.load(f)
-
-                 # Atomic move to final location
-                 if platform.system().lower() == "windows":
-                     # Windows requires removing target first
-                     if file_path.exists():
-                         file_path.unlink()
-
-                 shutil.move(str(temp_path), str(file_path))
-
-                 print(f"Successfully wrote {file_path}")
-                 return True
-
-             except Exception as e:
-                 print(f"Write attempt {attempt + 1} failed for {file_path}: {e}")
-
-                 # Clean up temp file
-                 try:
-                     if temp_path.exists():
-                         temp_path.unlink()
-                 except:
-                     pass
-
-                 if attempt == max_retries - 1:
-                     # Restore from backup if all attempts failed
-                     if backup_path and backup_path.exists():
-                         try:
-                             shutil.copy2(backup_path, file_path)
-                             print(f"Restored {file_path} from backup")
-                         except Exception as restore_error:
-                             print(f"Failed to restore backup: {restore_error}")
-
-                     return False
-
-                 # Wait before retry
-                 time.sleep(0.1 * (attempt + 1))
-
-         return False
-
-     @staticmethod
-     def safe_read_json(file_path: Path, default: Any = None) -> Any:
-         """Safely read JSON data with corruption recovery"""
-         if not file_path.exists():
-             return default
-
-         # Try reading main file
-         try:
-             with open(file_path, 'r', encoding='utf-8') as f:
-                 return json.load(f)
-         except Exception as e:
-             print(f"Failed to read {file_path}: {e}")
-
-         # Try to recover from backup
-         backup_files = sorted(
-             file_path.parent.glob(f"{file_path.stem}.backup.*"),
-             key=lambda x: x.stat().st_mtime,
-             reverse=True
-         )
-
-         for backup_path in backup_files:
-             try:
-                 with open(backup_path, 'r', encoding='utf-8') as f:
-                     data = json.load(f)
-
-                 print(f"Recovered data from backup: {backup_path}")
-
-                 # Try to restore the main file
-                 try:
-                     shutil.copy2(backup_path, file_path)
-                     print(f"Restored {file_path} from {backup_path}")
-                 except Exception as restore_error:
-                     print(f"Could not restore main file: {restore_error}")
-
-                 return data
-
-             except Exception as backup_error:
-                 print(f"Backup {backup_path} also corrupted: {backup_error}")
-                 continue
-
-         print(f"All recovery attempts failed for {file_path}, using default")
-         return default
-
-     @staticmethod
-     def _should_create_backup(file_path: Path) -> bool:
-         """Check if we should create a new backup (only if last backup is > 1 hour old)"""
-         try:
-             backup_files = sorted(
-                 file_path.parent.glob(f"{file_path.stem}.backup.*"),
-                 key=lambda x: x.stat().st_mtime,
-                 reverse=True
-             )
-
-             if not backup_files:
-                 return True  # No backups exist, create first one
-
-             # Check if the most recent backup is older than 1 hour
-             most_recent_backup = backup_files[0]
-             backup_age = time.time() - most_recent_backup.stat().st_mtime
-             return backup_age > 3600  # 3600 seconds = 1 hour
-
-         except Exception as e:
-             print(f"Error checking backup age: {e}")
-             return True  # If we can't check, err on the side of creating a backup
-
-     @staticmethod
-     def _cleanup_backups(file_path: Path, keep_count: int = 1):
-         """Keep only the most recent backup file (limit to 1 backup)"""
-         try:
-             backup_files = sorted(
-                 file_path.parent.glob(f"{file_path.stem}.backup.*"),
-                 key=lambda x: x.stat().st_mtime,
-                 reverse=True
-             )
-
-             # Keep only the most recent backup, delete all others
-             for old_backup in backup_files[keep_count:]:
-                 try:
-                     old_backup.unlink()
-                     print(f"Cleaned up old backup: {old_backup}")
-                 except Exception as cleanup_error:
-                     print(f"Failed to cleanup backup {old_backup}: {cleanup_error}")
-
-         except Exception as e:
-             print(f"Error cleaning up backups: {e}")
-
-
- class PersistentCacheService:
-     """Extremely robust persistent caching service for SignalPilot AI"""
-
-     def __init__(self):
-         self.cache_dir = CacheDirectoryManager.find_usable_cache_directory()
-         self.chat_histories_file = None
-         self.app_values_file = None
-         self._lock = threading.RLock()
-
-         if self.cache_dir:
-             print(f"Cache service initialized with directory: {self.cache_dir}")
-             self.chat_histories_file = self.cache_dir / "chat_histories.json"
-             self.app_values_file = self.cache_dir / "app_values.json"
-
-             print(f"Chat histories file: {self.chat_histories_file}")
-             print(f"App values file: {self.app_values_file}")
-
-             # Initialize files if they don't exist
-             try:
-                 self._initialize_cache_files()
-                 print("Cache files initialized successfully")
-             except Exception as e:
-                 print(f"ERROR: Failed to initialize cache files: {e}")
-                 import traceback
-                 traceback.print_exc()
-         else:
-             print("WARNING: Cache service running without persistent storage!")
-
-     def _initialize_cache_files(self):
-         """Initialize cache files with empty structures if they don't exist"""
-         try:
-             if not self.chat_histories_file.exists():
-                 print(f"Creating new chat histories file: {self.chat_histories_file}")
-                 success = RobustFileOperations.safe_write_json(self.chat_histories_file, {})
-                 if not success:
-                     print(f"ERROR: Failed to create chat histories file: {self.chat_histories_file}")
-                 else:
-                     print(f"Successfully created chat histories file")
-             else:
-                 print(f"Chat histories file already exists: {self.chat_histories_file}")
-
-             if not self.app_values_file.exists():
-                 print(f"Creating new app values file: {self.app_values_file}")
-                 success = RobustFileOperations.safe_write_json(self.app_values_file, {})
-                 if not success:
-                     print(f"ERROR: Failed to create app values file: {self.app_values_file}")
-                 else:
-                     print(f"Successfully created app values file")
-             else:
-                 print(f"App values file already exists: {self.app_values_file}")
-
-         except Exception as e:
-             print(f"ERROR: Exception in _initialize_cache_files: {e}")
-             raise
-
-     def is_available(self) -> bool:
-         """Check if cache service is available"""
-         return self.cache_dir is not None and self.cache_dir.exists()
-
-     def _is_notebook_chat_history_key(self, chat_id: str) -> bool:
-         """Check if this is a notebook-specific chat history key"""
-         return chat_id.startswith('chat-history-notebook-')
-
-     def _get_notebook_chat_history_file(self, chat_id: str) -> Path:
-         """Get the file path for a notebook-specific chat history"""
-         if not self.cache_dir:
-             raise ValueError("Cache directory not available")
-
-         # Extract notebook ID from the chat_id
-         notebook_id = chat_id.replace('chat-history-notebook-', '')
-         filename = f"notebook_chat_{notebook_id}.json"
-         return self.cache_dir / filename
-
-     # Chat Histories Management
-     def get_chat_histories(self) -> Dict[str, Any]:
-         """Get all chat histories"""
-         with self._lock:
-             if not self.chat_histories_file:
-                 return {}
-             return RobustFileOperations.safe_read_json(self.chat_histories_file, {})
-
-     def get_chat_history(self, chat_id: str) -> Optional[Any]:
-         """Get specific chat history"""
-         # Handle notebook-specific chat histories
-         if self._is_notebook_chat_history_key(chat_id):
-             try:
-                 notebook_file = self._get_notebook_chat_history_file(chat_id)
-                 if notebook_file.exists():
-                     print(f"Loading notebook chat history from: {notebook_file}")
-                     return RobustFileOperations.safe_read_json(notebook_file, None)
-                 else:
-                     print(f"Notebook chat history file does not exist: {notebook_file}")
-                     return None
-             except Exception as e:
-                 print(f"ERROR: Failed to get notebook chat history for {chat_id}: {e}")
-                 return None
-
-         # Handle regular chat histories
-         histories = self.get_chat_histories()
-         return histories.get(chat_id)
-
-     def set_chat_history(self, chat_id: str, history: Any) -> bool:
-         """Set specific chat history"""
-         with self._lock:
-             # Handle notebook-specific chat histories
-             if self._is_notebook_chat_history_key(chat_id):
-                 try:
-                     notebook_file = self._get_notebook_chat_history_file(chat_id)
-                     print(f"Saving notebook chat history to: {notebook_file}")
-                     success = RobustFileOperations.safe_write_json(notebook_file, history)
-                     if success:
-                         print(f"Successfully saved notebook chat history for {chat_id}")
-                     else:
-                         print(f"ERROR: Failed to write notebook chat history for {chat_id}")
-                     return success
-                 except Exception as e:
-                     print(f"ERROR: Exception while saving notebook chat history for {chat_id}: {e}")
-                     import traceback
-                     traceback.print_exc()
-                     return False
-
-             # Handle regular chat histories
-             if not self.chat_histories_file:
-                 print(f"ERROR: Cannot save chat history for {chat_id} - no chat histories file configured")
-                 return False
-
-             try:
-                 print(f"Attempting to save chat history for chat_id: {chat_id}")
-                 histories = self.get_chat_histories()
-                 print(f"Current histories count: {len(histories)}")
-
-                 histories[chat_id] = history
-                 print(f"Updated histories count: {len(histories)}")
-
-                 success = RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
-                 if success:
-                     print(f"Successfully saved chat history for {chat_id}")
-                 else:
-                     print(f"ERROR: Failed to write chat history file for {chat_id}")
-
-                 return success
-
-             except Exception as e:
-                 print(f"ERROR: Exception while saving chat history for {chat_id}: {e}")
-                 import traceback
-                 traceback.print_exc()
-                 return False
-
-     def delete_chat_history(self, chat_id: str) -> bool:
-         """Delete specific chat history"""
-         with self._lock:
-             # Handle notebook-specific chat histories
-             if self._is_notebook_chat_history_key(chat_id):
-                 try:
-                     notebook_file = self._get_notebook_chat_history_file(chat_id)
-                     if notebook_file.exists():
-                         notebook_file.unlink()
-                         print(f"Deleted notebook chat history file: {notebook_file}")
-                     return True
-                 except Exception as e:
-                     print(f"ERROR: Failed to delete notebook chat history for {chat_id}: {e}")
-                     return False
-
-             # Handle regular chat histories
-             if not self.chat_histories_file:
-                 return False
-
-             histories = self.get_chat_histories()
-             if chat_id in histories:
-                 del histories[chat_id]
-                 return RobustFileOperations.safe_write_json(self.chat_histories_file, histories)
-             return True
-
-     def clear_chat_histories(self) -> bool:
-         """Clear all chat histories"""
-         with self._lock:
-             if not self.chat_histories_file:
-                 return False
-             return RobustFileOperations.safe_write_json(self.chat_histories_file, {})
-
-     # App Values Management
-     def get_app_values(self) -> Dict[str, Any]:
-         """Get all app values"""
-         with self._lock:
-             if not self.app_values_file:
-                 return {}
-             return RobustFileOperations.safe_read_json(self.app_values_file, {})
-
-     def get_app_value(self, key: str, default: Any = None) -> Any:
-         """Get specific app value"""
-         values = self.get_app_values()
-         return values.get(key, default)
-
-     def set_app_value(self, key: str, value: Any) -> bool:
-         """Set specific app value"""
-         with self._lock:
-             if not self.app_values_file:
-                 return False
-
-             values = self.get_app_values()
-             values[key] = value
-             return RobustFileOperations.safe_write_json(self.app_values_file, values)
-
-     def delete_app_value(self, key: str) -> bool:
-         """Delete specific app value"""
-         with self._lock:
-             if not self.app_values_file:
-                 return False
-
-             values = self.get_app_values()
-             if key in values:
-                 del values[key]
-                 return RobustFileOperations.safe_write_json(self.app_values_file, values)
-             return True
-
-     def clear_app_values(self) -> bool:
-         """Clear all app values"""
-         with self._lock:
-             if not self.app_values_file:
-                 return False
-             return RobustFileOperations.safe_write_json(self.app_values_file, {})
-
-     def get_cache_info(self) -> Dict[str, Any]:
-         """Get cache service information"""
-         info = {
-             "available": self.is_available(),
-             "cache_directory": str(self.cache_dir) if self.cache_dir else None,
-             "platform": platform.system(),
-             "chat_histories_size": 0,
-             "app_values_size": 0,
-             "total_chat_histories": 0,
-             "total_app_values": 0,
-             "notebook_chat_files": 0,
-             "notebook_chat_files_size": 0
-         }
-
-         if self.is_available():
-             try:
-                 if self.chat_histories_file.exists():
-                     info["chat_histories_size"] = self.chat_histories_file.stat().st_size
-                     histories = self.get_chat_histories()
-                     info["total_chat_histories"] = len(histories)
-
-                 if self.app_values_file.exists():
-                     info["app_values_size"] = self.app_values_file.stat().st_size
-                     values = self.get_app_values()
-                     info["total_app_values"] = len(values)
-
-                 # Count notebook chat history files
-                 notebook_files = list(self.cache_dir.glob("notebook_chat_*.json"))
-                 info["notebook_chat_files"] = len(notebook_files)
-                 info["notebook_chat_files_size"] = sum(f.stat().st_size for f in notebook_files if f.exists())
-
-             except Exception as e:
-                 info["error"] = str(e)
-
-         return info
-
-
- # Global cache service instance
- _cache_service = None
-
- def get_cache_service() -> PersistentCacheService:
-     """Get the global cache service instance"""
-     global _cache_service
-     if _cache_service is None:
-         _cache_service = PersistentCacheService()
-     return _cache_service

  class HelloWorldHandler(APIHandler):
      # The following decorator should be present on all verb methods (head, get, post,
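The cache stack removed above (CacheDirectoryManager, RobustFileOperations, PersistentCacheService) moves into the new cache_service.py module, and handlers.py now imports only the get_cache_service() accessor. A minimal sketch of the relocated service in use, assuming the moved code keeps the public API shown in the removed lines; the key and value below are hypothetical:

from signalpilot_ai_internal.cache_service import get_cache_service

cache = get_cache_service()        # lazily constructs the singleton service
if cache.is_available():           # False when no writable cache directory was found
    cache.set_app_value("example-key", {"theme": "dark"})   # hypothetical key/value
    print(cache.get_app_value("example-key"))
    print(cache.get_cache_info())  # directory, file sizes, entry counts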
@@ -561,377 +22,6 @@ class HelloWorldHandler(APIHandler):
          }))


- class ChatHistoriesHandler(APIHandler):
-     """Handler for chat histories cache operations"""
-
-     @tornado.web.authenticated
-     def get(self, chat_id=None):
-         """Get chat histories or specific chat history"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             if chat_id:
-                 # Get specific chat history
-                 history = cache_service.get_chat_history(chat_id)
-                 if history is None:
-                     self.set_status(404)
-                     self.finish(json.dumps({
-                         "error": "Chat history not found",
-                         "chat_id": chat_id
-                     }))
-                 else:
-                     self.finish(json.dumps({
-                         "chat_id": chat_id,
-                         "history": history
-                     }))
-             else:
-                 # Get all chat histories
-                 histories = cache_service.get_chat_histories()
-                 self.finish(json.dumps({
-                     "chat_histories": histories,
-                     "count": len(histories)
-                 }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-     @tornado.web.authenticated
-     def post(self, chat_id=None):
-         """Create or update chat history"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             # Parse request body
-             try:
-                 body = json.loads(self.request.body.decode('utf-8'))
-             except json.JSONDecodeError:
-                 self.set_status(400)
-                 self.finish(json.dumps({
-                     "error": "Invalid JSON in request body"
-                 }))
-                 return
-
-             if chat_id:
-                 # Update specific chat history
-                 history_data = body.get('history')
-                 if history_data is None:
-                     self.set_status(400)
-                     self.finish(json.dumps({
-                         "error": "Missing 'history' field in request body"
-                     }))
-                     return
-
-                 success = cache_service.set_chat_history(chat_id, history_data)
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "chat_id": chat_id,
-                         "message": "Chat history updated successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to save chat history"
-                     }))
-             else:
-                 # Bulk update operation
-                 chat_histories = body.get('chat_histories', {})
-                 if not isinstance(chat_histories, dict):
-                     self.set_status(400)
-                     self.finish(json.dumps({
-                         "error": "'chat_histories' must be an object"
-                     }))
-                     return
-
-                 # Update each chat history
-                 failures = []
-                 successes = []
-
-                 for cid, history in chat_histories.items():
-                     if cache_service.set_chat_history(cid, history):
-                         successes.append(cid)
-                     else:
-                         failures.append(cid)
-
-                 self.finish(json.dumps({
-                     "success": len(failures) == 0,
-                     "updated": successes,
-                     "failed": failures,
-                     "message": f"Updated {len(successes)} chat histories, {len(failures)} failed"
-                 }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-     @tornado.web.authenticated
-     def delete(self, chat_id=None):
-         """Delete chat history or all chat histories"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             if chat_id:
-                 # Delete specific chat history
-                 success = cache_service.delete_chat_history(chat_id)
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "chat_id": chat_id,
-                         "message": "Chat history deleted successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to delete chat history"
-                     }))
-             else:
-                 # Clear all chat histories
-                 success = cache_service.clear_chat_histories()
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "message": "All chat histories cleared successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to clear chat histories"
-                     }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-
- class AppValuesHandler(APIHandler):
-     """Handler for app values cache operations"""
-
-     @tornado.web.authenticated
-     def get(self, key=None):
-         """Get app values or specific app value"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             if key:
-                 # Get specific app value
-                 default = self.get_argument('default', None)
-                 try:
-                     if default:
-                         default = json.loads(default)
-                 except json.JSONDecodeError:
-                     pass  # Use string default
-
-                 value = cache_service.get_app_value(key, default)
-                 self.finish(json.dumps({
-                     "key": key,
-                     "value": value
-                 }))
-             else:
-                 # Get all app values
-                 values = cache_service.get_app_values()
-                 self.finish(json.dumps({
-                     "app_values": values,
-                     "count": len(values)
-                 }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-     @tornado.web.authenticated
-     def post(self, key=None):
-         """Create or update app value"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             # Parse request body
-             try:
-                 body = json.loads(self.request.body.decode('utf-8'))
-             except json.JSONDecodeError:
-                 self.set_status(400)
-                 self.finish(json.dumps({
-                     "error": "Invalid JSON in request body"
-                 }))
-                 return
-
-             if key:
-                 # Update specific app value
-                 value_data = body.get('value')
-                 if value_data is None:
-                     self.set_status(400)
-                     self.finish(json.dumps({
-                         "error": "Missing 'value' field in request body"
-                     }))
-                     return
-
-                 success = cache_service.set_app_value(key, value_data)
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "key": key,
-                         "message": "App value updated successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to save app value"
-                     }))
-             else:
-                 # Bulk update operation
-                 app_values = body.get('app_values', {})
-                 if not isinstance(app_values, dict):
-                     self.set_status(400)
-                     self.finish(json.dumps({
-                         "error": "'app_values' must be an object"
-                     }))
-                     return
-
-                 # Update each app value
-                 failures = []
-                 successes = []
-
-                 for k, value in app_values.items():
-                     if cache_service.set_app_value(k, value):
-                         successes.append(k)
-                     else:
-                         failures.append(k)
-
-                 self.finish(json.dumps({
-                     "success": len(failures) == 0,
-                     "updated": successes,
-                     "failed": failures,
-                     "message": f"Updated {len(successes)} app values, {len(failures)} failed"
-                 }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-     @tornado.web.authenticated
-     def delete(self, key=None):
-         """Delete app value or all app values"""
-         try:
-             cache_service = get_cache_service()
-
-             if not cache_service.is_available():
-                 self.set_status(503)
-                 self.finish(json.dumps({
-                     "error": "Cache service not available",
-                     "message": "Persistent storage is not accessible"
-                 }))
-                 return
-
-             if key:
-                 # Delete specific app value
-                 success = cache_service.delete_app_value(key)
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "key": key,
-                         "message": "App value deleted successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to delete app value"
-                     }))
-             else:
-                 # Clear all app values
-                 success = cache_service.clear_app_values()
-                 if success:
-                     self.finish(json.dumps({
-                         "success": True,
-                         "message": "All app values cleared successfully"
-                     }))
-                 else:
-                     self.set_status(500)
-                     self.finish(json.dumps({
-                         "error": "Failed to clear app values"
-                     }))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-
- class CacheInfoHandler(APIHandler):
-     """Handler for cache service information"""
-
-     @tornado.web.authenticated
-     def get(self):
-         """Get cache service information and statistics"""
-         try:
-             cache_service = get_cache_service()
-             info = cache_service.get_cache_info()
-             self.finish(json.dumps(info))
-
-         except Exception as e:
-             self.set_status(500)
-             self.finish(json.dumps({
-                 "error": "Internal server error",
-                 "message": str(e)
-             }))
-
-
  def setup_handlers(web_app):
      host_pattern = ".*$"
      base_url = web_app.settings["base_url"]
@@ -948,6 +38,18 @@ def setup_handlers(web_app):

      cache_info_route = url_path_join(base_url, "signalpilot-ai-internal", "cache", "info")

+     # Database service endpoints
+     database_schema_route = url_path_join(base_url, "signalpilot-ai-internal", "database", "schema")
+     database_query_route = url_path_join(base_url, "signalpilot-ai-internal", "database", "query")
+
+     # MySQL service endpoints
+     mysql_schema_route = url_path_join(base_url, "signalpilot-ai-internal", "mysql", "schema")
+     mysql_query_route = url_path_join(base_url, "signalpilot-ai-internal", "mysql", "query")
+
+     # Snowflake service endpoints
+     snowflake_schema_route = url_path_join(base_url, "signalpilot-ai-internal", "snowflake", "schema")
+     snowflake_query_route = url_path_join(base_url, "signalpilot-ai-internal", "snowflake", "query")
+
      handlers = [
          # Original endpoint
          (hello_route, HelloWorldHandler),
@@ -962,6 +64,18 @@ def setup_handlers(web_app):

          # Cache info endpoint
          (cache_info_route, CacheInfoHandler),
+
+         # Database service endpoints (unified for PostgreSQL and MySQL)
+         (database_schema_route, UnifiedDatabaseSchemaHandler),
+         (database_query_route, UnifiedDatabaseQueryHandler),
+
+         # MySQL service endpoints (use unified handler)
+         (mysql_schema_route, UnifiedDatabaseSchemaHandler),
+         (mysql_query_route, UnifiedDatabaseQueryHandler),
+
+         # Snowflake service endpoints
+         (snowflake_schema_route, SnowflakeSchemaHandler),
+         (snowflake_query_route, SnowflakeQueryHandler),
      ]

      web_app.add_handlers(host_pattern, handlers)
@@ -980,4 +94,10 @@ def setup_handlers(web_app):
      print(f" - Chat History (by ID): {chat_history_route}")
      print(f" - App Values: {app_values_route}")
      print(f" - App Value (by key): {app_value_route}")
-     print(f" - Cache Info: {cache_info_route}")
+     print(f" - Cache Info: {cache_info_route}")
+     print(f" - Database Schema: {database_schema_route}")
+     print(f" - Database Query: {database_query_route}")
+     print(f" - MySQL Schema: {mysql_schema_route}")
+     print(f" - MySQL Query: {mysql_query_route}")
+     print(f" - Snowflake Schema: {snowflake_schema_route}")
+     print(f" - Snowflake Query: {snowflake_query_route}")
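For reference, a short sketch of how the routes registered above might be exercised from a client. The paths come from setup_handlers(); the server URL, the token header, and the HTTP verbs accepted by the new database handlers are assumptions, since those handlers live in the new service modules rather than in this diff:

import requests

BASE = "http://localhost:8888/signalpilot-ai-internal"  # assumed local Jupyter server
HEADERS = {"Authorization": "token <jupyter-token>"}    # placeholder token

# Cache statistics served by CacheInfoHandler
print(requests.get(f"{BASE}/cache/info", headers=HEADERS).json())

# New schema routes; assumed here to respond to GET
for route in ("database/schema", "mysql/schema", "snowflake/schema"):
    resp = requests.get(f"{BASE}/{route}", headers=HEADERS)
    print(route, resp.status_code)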