pomera-ai-commander 0.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1033 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_validator.py +1066 -1066
  12. package/core/database_connection_manager.py +744 -744
  13. package/core/database_curl_settings_manager.py +608 -608
  14. package/core/database_promera_ai_settings_manager.py +446 -446
  15. package/core/database_schema.py +411 -411
  16. package/core/database_schema_manager.py +395 -395
  17. package/core/database_settings_manager.py +1507 -1507
  18. package/core/database_settings_manager_interface.py +456 -456
  19. package/core/dialog_manager.py +734 -734
  20. package/core/efficient_line_numbers.py +510 -510
  21. package/core/error_handler.py +746 -746
  22. package/core/error_service.py +431 -431
  23. package/core/event_consolidator.py +511 -511
  24. package/core/mcp/__init__.py +43 -43
  25. package/core/mcp/protocol.py +288 -288
  26. package/core/mcp/schema.py +251 -251
  27. package/core/mcp/server_stdio.py +299 -299
  28. package/core/mcp/tool_registry.py +2372 -2345
  29. package/core/memory_efficient_text_widget.py +711 -711
  30. package/core/migration_manager.py +914 -914
  31. package/core/migration_test_suite.py +1085 -1085
  32. package/core/migration_validator.py +1143 -1143
  33. package/core/optimized_find_replace.py +714 -714
  34. package/core/optimized_pattern_engine.py +424 -424
  35. package/core/optimized_search_highlighter.py +552 -552
  36. package/core/performance_monitor.py +674 -674
  37. package/core/persistence_manager.py +712 -712
  38. package/core/progressive_stats_calculator.py +632 -632
  39. package/core/regex_pattern_cache.py +529 -529
  40. package/core/regex_pattern_library.py +350 -350
  41. package/core/search_operation_manager.py +434 -434
  42. package/core/settings_defaults_registry.py +1087 -1087
  43. package/core/settings_integrity_validator.py +1111 -1111
  44. package/core/settings_serializer.py +557 -557
  45. package/core/settings_validator.py +1823 -1823
  46. package/core/smart_stats_calculator.py +709 -709
  47. package/core/statistics_update_manager.py +619 -619
  48. package/core/stats_config_manager.py +858 -858
  49. package/core/streaming_text_handler.py +723 -723
  50. package/core/task_scheduler.py +596 -596
  51. package/core/update_pattern_library.py +168 -168
  52. package/core/visibility_monitor.py +596 -596
  53. package/core/widget_cache.py +498 -498
  54. package/mcp.json +51 -61
  55. package/package.json +61 -57
  56. package/pomera.py +7482 -7482
  57. package/pomera_mcp_server.py +183 -144
  58. package/requirements.txt +32 -0
  59. package/tools/__init__.py +4 -4
  60. package/tools/ai_tools.py +2891 -2891
  61. package/tools/ascii_art_generator.py +352 -352
  62. package/tools/base64_tools.py +183 -183
  63. package/tools/base_tool.py +511 -511
  64. package/tools/case_tool.py +308 -308
  65. package/tools/column_tools.py +395 -395
  66. package/tools/cron_tool.py +884 -884
  67. package/tools/curl_history.py +600 -600
  68. package/tools/curl_processor.py +1207 -1207
  69. package/tools/curl_settings.py +502 -502
  70. package/tools/curl_tool.py +5467 -5467
  71. package/tools/diff_viewer.py +1071 -1071
  72. package/tools/email_extraction_tool.py +248 -248
  73. package/tools/email_header_analyzer.py +425 -425
  74. package/tools/extraction_tools.py +250 -250
  75. package/tools/find_replace.py +1750 -1750
  76. package/tools/folder_file_reporter.py +1463 -1463
  77. package/tools/folder_file_reporter_adapter.py +480 -480
  78. package/tools/generator_tools.py +1216 -1216
  79. package/tools/hash_generator.py +255 -255
  80. package/tools/html_tool.py +656 -656
  81. package/tools/jsonxml_tool.py +729 -729
  82. package/tools/line_tools.py +419 -419
  83. package/tools/markdown_tools.py +561 -561
  84. package/tools/mcp_widget.py +1417 -1417
  85. package/tools/notes_widget.py +973 -973
  86. package/tools/number_base_converter.py +372 -372
  87. package/tools/regex_extractor.py +571 -571
  88. package/tools/slug_generator.py +310 -310
  89. package/tools/sorter_tools.py +458 -458
  90. package/tools/string_escape_tool.py +392 -392
  91. package/tools/text_statistics_tool.py +365 -365
  92. package/tools/text_wrapper.py +430 -430
  93. package/tools/timestamp_converter.py +421 -421
  94. package/tools/tool_loader.py +710 -710
  95. package/tools/translator_tools.py +522 -522
  96. package/tools/url_link_extractor.py +261 -261
  97. package/tools/url_parser.py +204 -204
  98. package/tools/whitespace_tools.py +355 -355
  99. package/tools/word_frequency_counter.py +146 -146
  100. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  102. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  103. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  104. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  105. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  106. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  107. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  108. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  109. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  110. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  111. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  112. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  113. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  114. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  115. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  116. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  117. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  118. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  119. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  120. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  121. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  122. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  123. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  124. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  125. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  126. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  127. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  128. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  129. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  131. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  132. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  134. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  135. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  136. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  137. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  138. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  139. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  140. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  141. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  142. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  143. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  144. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  145. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  146. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  147. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  148. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  151. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  152. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  153. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  154. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  155. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  156. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  157. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  158. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  159. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  160. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  161. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  162. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  163. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  164. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  165. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  166. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  167. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  168. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  169. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  170. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
package/core/database_connection_manager.py
@@ -1,745 +1,745 @@
Every line of the file is rewritten in this release: 744 lines are removed and 744 lines are added, and as rendered here the removed lines are identical to the added ones (only the final line, "self._manager = None", is unchanged context). The resulting file content is:
"""
Database Connection Manager for Settings Migration

This module provides robust database connection management with SQLite WAL mode
for concurrent access, connection pooling, transaction management, and automatic
backup scheduling with disk persistence.

Designed to handle the high-frequency settings access patterns identified in
the production codebase analysis (579+ config operations across 45 files).

Enhanced with performance monitoring and optimization capabilities.
"""

import sqlite3
import threading
import time
import os
import shutil
import logging
from typing import Optional, Callable, List, Any, Dict
from datetime import datetime, timedelta
from contextlib import contextmanager
from pathlib import Path


class DatabaseConnectionManager:
    """
    Manages SQLite database connections with WAL mode for concurrency support.

    Features:
    - WAL (Write-Ahead Logging) mode for better concurrent access
    - Connection pooling for multiple threads
    - Automatic backup scheduling and disk persistence
    - Transaction management with rollback support
    - Error handling and connection recovery
    - Thread-safe operations
    """

    def __init__(self, db_path: str = ":memory:", backup_path: Optional[str] = None,
                 enable_performance_monitoring: bool = True):
        """
        Initialize the database connection manager.

        Args:
            db_path: Path to SQLite database file (":memory:" for in-memory)
            backup_path: Path for automatic backups (None to disable)
            enable_performance_monitoring: Whether to enable performance monitoring
        """
        self.db_path = db_path
        self.backup_path = backup_path or "settings_backup.db"
        self.backup_interval = 300  # 5 minutes default
        self.last_backup = None
        self.auto_backup_enabled = True
        self.enable_performance_monitoring = enable_performance_monitoring

        # Thread safety
        self._lock = threading.RLock()
        self._connections = {}  # Thread-local connections
        self._main_connection = None

        # Connection configuration
        self._connection_config = {
            'timeout': 30.0,
            'isolation_level': None,  # Autocommit mode
            'check_same_thread': False
        }

        # Backup and persistence settings
        self._backup_thread = None
        self._backup_stop_event = threading.Event()
        self._changes_since_backup = 0
        self._max_changes_before_backup = 100

        # Error handling
        self.logger = logging.getLogger(__name__)
        self._connection_errors = []
        self._max_error_history = 50

        # Performance monitoring
        self._performance_monitor = None
        if enable_performance_monitoring:
            try:
                from .performance_monitor import get_performance_monitor
                self._performance_monitor = get_performance_monitor()
            except ImportError:
                self.logger.warning("Performance monitoring not available")

        # Query execution statistics
        self._query_count = 0
        self._total_query_time = 0.0
        self._slow_queries = []
        self._slow_query_threshold = 0.1  # 100ms

        # Initialize main connection
        self._initialize_main_connection()

    def _initialize_main_connection(self) -> None:
        """Initialize the main database connection with proper configuration."""
        try:
            self._main_connection = sqlite3.connect(
                self.db_path,
                **self._connection_config
            )

            # Configure WAL mode for better concurrency
            self._configure_wal_mode(self._main_connection)

            # Configure performance settings
            self._configure_performance_settings(self._main_connection)

            # Start automatic backup if enabled and not in-memory
            if self.auto_backup_enabled and self.db_path != ":memory:":
                self._start_backup_thread()

            self.logger.info(f"Database connection initialized: {self.db_path}")

        except Exception as e:
            self._log_connection_error(f"Failed to initialize main connection: {e}")
            raise

    def _configure_wal_mode(self, connection: sqlite3.Connection) -> None:
        """
        Configure WAL (Write-Ahead Logging) mode for better concurrency.

        Args:
            connection: SQLite connection to configure
        """
        try:
            # Enable WAL mode for better concurrent access
            connection.execute("PRAGMA journal_mode=WAL")

            # Configure WAL settings for performance
            connection.execute("PRAGMA wal_autocheckpoint=1000")  # Checkpoint every 1000 pages
            connection.execute("PRAGMA wal_checkpoint(TRUNCATE)")  # Initial checkpoint

            self.logger.debug("WAL mode configured successfully")

        except Exception as e:
            self.logger.warning(f"Failed to configure WAL mode: {e}")
            # Continue without WAL mode - not critical for in-memory databases

    def _configure_performance_settings(self, connection: sqlite3.Connection) -> None:
        """
        Configure SQLite performance settings for optimal operation.

        Args:
            connection: SQLite connection to configure
        """
        try:
            # Performance optimizations
            connection.execute("PRAGMA synchronous=NORMAL")  # Balance safety and speed
            connection.execute("PRAGMA cache_size=10000")  # 10MB cache
            connection.execute("PRAGMA temp_store=MEMORY")  # Use memory for temp tables
            connection.execute("PRAGMA mmap_size=268435456")  # 256MB memory mapping

            # Enable foreign key constraints
            connection.execute("PRAGMA foreign_keys=ON")

            # Optimize for frequent reads with some writes
            connection.execute("PRAGMA optimize")

            self.logger.debug("Performance settings configured")

        except Exception as e:
            self.logger.warning(f"Failed to configure performance settings: {e}")

    def get_connection(self) -> sqlite3.Connection:
        """
        Get a database connection for the current thread.

        Returns:
            SQLite connection object

        Raises:
            sqlite3.Error: If connection cannot be established
        """
        thread_id = threading.get_ident()

        with self._lock:
            # Return existing connection for this thread
            if thread_id in self._connections:
                connection = self._connections[thread_id]
                try:
                    # Test connection is still valid
                    start_time = time.time()
                    connection.execute("SELECT 1")

                    # Record the test query
                    if self.enable_performance_monitoring:
                        execution_time = time.time() - start_time
                        self._record_query_performance("SELECT 1", execution_time)

                    return connection
                except sqlite3.Error:
                    # Connection is stale, remove it
                    del self._connections[thread_id]

            # Create new connection for this thread
            try:
                connection = sqlite3.connect(
                    self.db_path,
                    **self._connection_config
                )

                # Configure the new connection
                self._configure_wal_mode(connection)
                self._configure_performance_settings(connection)

                # Store for reuse
                self._connections[thread_id] = connection

                self.logger.debug(f"Created new connection for thread {thread_id}")
                return connection

            except Exception as e:
                error_msg = f"Failed to create connection for thread {thread_id}: {e}"
                self._log_connection_error(error_msg)
                raise sqlite3.Error(error_msg)

    @contextmanager
    def transaction(self):
        """
        Context manager for database transactions with automatic rollback on error.

        Usage:
            with connection_manager.transaction() as conn:
                conn.execute("INSERT INTO table VALUES (?)", (value,))
                conn.execute("UPDATE table SET col = ?", (new_value,))
        """
        connection = self.get_connection()

        try:
            connection.execute("BEGIN TRANSACTION")
            yield connection
            connection.execute("COMMIT")
            self._changes_since_backup += 1

        except Exception as e:
            connection.execute("ROLLBACK")
            self.logger.error(f"Transaction rolled back due to error: {e}")
            raise

    def execute_transaction(self, operations: List[Callable[[sqlite3.Connection], Any]]) -> List[Any]:
        """
        Execute multiple operations in a single transaction.

        Args:
            operations: List of functions that take a connection and return a result

        Returns:
            List of results from each operation

        Raises:
            sqlite3.Error: If any operation fails (all operations are rolled back)
        """
        results = []

        with self.transaction() as conn:
            for operation in operations:
                try:
                    result = operation(conn)
                    results.append(result)
                except Exception as e:
                    self.logger.error(f"Operation failed in transaction: {e}")
                    raise

        return results

    def backup_to_disk(self, filepath: Optional[str] = None) -> bool:
        """
        Backup the current database to a disk file.

        Args:
            filepath: Target backup file path (uses default if None)

        Returns:
            True if backup successful, False otherwise
        """
        if self.db_path == ":memory:" and not self._main_connection:
            self.logger.warning("Cannot backup: no in-memory database connection")
            return False

        backup_path = filepath or self.backup_path

        try:
            # Ensure backup directory exists
            backup_dir = os.path.dirname(backup_path)
            if backup_dir:
                os.makedirs(backup_dir, exist_ok=True)

            # Create backup connection
            backup_conn = sqlite3.connect(backup_path)

            try:
                # Perform backup
                source_conn = self._main_connection or self.get_connection()
                source_conn.backup(backup_conn)

                self.last_backup = datetime.now()
                self._changes_since_backup = 0

                self.logger.info(f"Database backed up to: {backup_path}")
                return True

            finally:
                backup_conn.close()

        except Exception as e:
            self._log_connection_error(f"Backup failed: {e}")
            return False

    def restore_from_disk(self, filepath: Optional[str] = None) -> bool:
        """
        Restore database from a disk backup file.

        Args:
            filepath: Source backup file path (uses default if None)

        Returns:
            True if restore successful, False otherwise
        """
        restore_path = filepath or self.backup_path

        if not os.path.exists(restore_path):
            self.logger.error(f"Backup file not found: {restore_path}")
            return False

        try:
            # Close existing connections
            self.close_all_connections()

            # Copy backup to main database location if not in-memory
            if self.db_path != ":memory:":
                shutil.copy2(restore_path, self.db_path)
            else:
                # For in-memory, we need to restore by copying data
                restore_conn = sqlite3.connect(restore_path)
                try:
                    self._main_connection = sqlite3.connect(":memory:")
                    restore_conn.backup(self._main_connection)
                    self._configure_wal_mode(self._main_connection)
                    self._configure_performance_settings(self._main_connection)
                finally:
                    restore_conn.close()

            # Reinitialize if needed
            if self.db_path != ":memory:":
                self._initialize_main_connection()

            self.logger.info(f"Database restored from: {restore_path}")
            return True

        except Exception as e:
            self._log_connection_error(f"Restore failed: {e}")
            return False

    def _start_backup_thread(self) -> None:
        """Start the automatic backup thread."""
        if self._backup_thread and self._backup_thread.is_alive():
            return

        self._backup_stop_event.clear()
        self._backup_thread = threading.Thread(
            target=self._backup_worker,
            daemon=True,
            name="DatabaseBackupWorker"
        )
        self._backup_thread.start()
        self.logger.debug("Automatic backup thread started")

    def _backup_worker(self) -> None:
        """Worker thread for automatic backups."""
        while not self._backup_stop_event.is_set():
            try:
                # Check if backup is needed
                should_backup = False

                # Time-based backup
                if self.last_backup is None:
                    should_backup = True
                elif datetime.now() - self.last_backup > timedelta(seconds=self.backup_interval):
                    should_backup = True

                # Change-based backup
                if self._changes_since_backup >= self._max_changes_before_backup:
                    should_backup = True

                if should_backup:
                    self.backup_to_disk()

                # Wait before next check (but allow early termination)
                self._backup_stop_event.wait(min(60, self.backup_interval // 5))

            except Exception as e:
                self.logger.error(f"Backup worker error: {e}")
                # Continue running despite errors
                self._backup_stop_event.wait(60)

    def set_backup_interval(self, seconds: int) -> None:
        """
        Set the automatic backup interval.

        Args:
            seconds: Backup interval in seconds (0 to disable)
        """
        self.backup_interval = max(0, seconds)
        self.auto_backup_enabled = seconds > 0

        if self.auto_backup_enabled and self.db_path != ":memory:":
            self._start_backup_thread()
        elif not self.auto_backup_enabled and self._backup_thread:
            self._backup_stop_event.set()

    def get_connection_info(self) -> Dict[str, Any]:
        """
        Get information about current database connections.

        Returns:
            Dictionary with connection statistics and status
        """
        with self._lock:
            info = {
                'db_path': self.db_path,
                'backup_path': self.backup_path,
                'active_connections': len(self._connections),
                'backup_interval': self.backup_interval,
                'last_backup': self.last_backup.isoformat() if self.last_backup else None,
                'changes_since_backup': self._changes_since_backup,
                'auto_backup_enabled': self.auto_backup_enabled,
                'recent_errors': self._connection_errors[-5:] if self._connection_errors else [],
                'performance_monitoring_enabled': self.enable_performance_monitoring
            }

            # Add performance statistics if monitoring is enabled
            if self._performance_monitor:
                try:
                    perf_stats = self._performance_monitor.get_performance_stats()
                    info.update({
                        'query_count': self._query_count,
                        'avg_query_time': self._total_query_time / max(self._query_count, 1),
                        'slow_queries_count': len(self._slow_queries),
                        'cache_hit_rate': perf_stats.cache_hit_rate,
                        'memory_usage_mb': perf_stats.memory_usage_mb
                    })
                except Exception as e:
                    self.logger.warning(f"Failed to get performance stats: {e}")

            return info

    @contextmanager
    def monitored_query(self, query: str, params: tuple = ()):
        """
        Context manager for executing queries with performance monitoring.

        Args:
            query: SQL query string
            params: Query parameters

        Yields:
            Database connection
        """
        start_time = time.time()
        connection = self.get_connection()

        try:
            # Record setting access if it's a settings query
            if self._performance_monitor:
                self._extract_and_record_setting_access(query, params)

            yield connection

        finally:
            # Record query performance
            execution_time = time.time() - start_time
            self._record_query_performance(query, execution_time)

    def _extract_and_record_setting_access(self, query: str, params: tuple) -> None:
        """Extract setting key from query and record access."""
        if not self._performance_monitor:
            return

        try:
            query_lower = query.lower()

            # Extract setting key from different query types
            if 'core_settings' in query_lower and 'where key' in query_lower:
                if params and len(params) > 0:
                    setting_key = str(params[0])
                    self._performance_monitor.record_setting_access(f"core:{setting_key}")

            elif 'tool_settings' in query_lower and 'where tool_name' in query_lower:
                if params and len(params) >= 2:
                    tool_name = str(params[0])
                    setting_path = str(params[1]) if len(params) > 1 else "all"
                    self._performance_monitor.record_setting_access(f"tool:{tool_name}.{setting_path}")

            elif 'tab_content' in query_lower:
                if params and len(params) > 0:
                    tab_type = str(params[0])
                    self._performance_monitor.record_setting_access(f"tab:{tab_type}")

        except Exception as e:
            self.logger.debug(f"Failed to extract setting access: {e}")

    def _record_query_performance(self, query: str, execution_time: float) -> None:
        """Record query performance metrics."""
        with self._lock:
            self._query_count += 1
            self._total_query_time += execution_time

            # Track slow queries
            if execution_time > self._slow_query_threshold:
                slow_query_info = {
                    'query': query[:200],  # Truncate long queries
                    'execution_time': execution_time,
                    'timestamp': datetime.now().isoformat()
                }
                self._slow_queries.append(slow_query_info)

                # Keep only recent slow queries
                if len(self._slow_queries) > 50:
                    self._slow_queries = self._slow_queries[-50:]

                self.logger.warning(f"Slow query detected: {execution_time:.3f}s - {query[:100]}...")

    def get_performance_stats(self) -> Dict[str, Any]:
        """
        Get detailed performance statistics.

        Returns:
            Dictionary with performance metrics
        """
        with self._lock:
            stats = {
                'total_queries': self._query_count,
                'total_query_time': self._total_query_time,
                'avg_query_time': self._total_query_time / max(self._query_count, 1),
                'slow_queries_count': len(self._slow_queries),
                'slow_query_threshold': self._slow_query_threshold,
                'recent_slow_queries': self._slow_queries[-10:] if self._slow_queries else []
            }

            # Add performance monitor stats if available
            if self._performance_monitor:
                try:
                    monitor_stats = self._performance_monitor.get_performance_stats()
                    stats.update({
                        'cache_hit_rate': monitor_stats.cache_hit_rate,
                        'queries_per_second': monitor_stats.queries_per_second,
                        'memory_usage_mb': monitor_stats.memory_usage_mb,
                        'hot_settings': self._performance_monitor.get_hot_settings(10),
                        'cache_stats': self._performance_monitor.get_cache_stats()
                    })
                except Exception as e:
                    self.logger.warning(f"Failed to get monitor stats: {e}")

            return stats

    def optimize_database(self) -> List[str]:
        """
        Perform database optimization based on usage patterns.

        Returns:
            List of optimization actions performed
        """
        actions = []

        try:
            conn = self.get_connection()

            # Analyze and optimize
            conn.execute("ANALYZE")
            actions.append("Analyzed database statistics")

            # Optimize query planner
            conn.execute("PRAGMA optimize")
            actions.append("Optimized query planner")

            # Vacuum if needed (for non-memory databases)
            if self.db_path != ":memory:":
                # Check fragmentation
                cursor = conn.execute("PRAGMA freelist_count")
                free_pages = cursor.fetchone()[0]

                cursor = conn.execute("PRAGMA page_count")
                total_pages = cursor.fetchone()[0]

                if total_pages > 0 and (free_pages / total_pages) > 0.1:  # 10% fragmentation
                    conn.execute("VACUUM")
                    actions.append("Vacuumed database to reduce fragmentation")

            # Suggest indexes based on performance monitor data
            if self._performance_monitor:
                index_suggestions = self._performance_monitor.optimize_indexes(self)
                for index_sql in index_suggestions:
                    try:
                        conn.execute(index_sql)
                        actions.append(f"Created index: {index_sql}")
                    except sqlite3.Error as e:
                        self.logger.warning(f"Failed to create index: {e}")

            self.logger.info(f"Database optimization completed: {len(actions)} actions")

        except Exception as e:
            self.logger.error(f"Database optimization failed: {e}")
            actions.append(f"Optimization failed: {e}")

        return actions

    def clear_performance_data(self) -> None:
        """Clear performance monitoring data."""
        with self._lock:
            self._query_count = 0
            self._total_query_time = 0.0
            self._slow_queries.clear()

        if self._performance_monitor:
            self._performance_monitor.reset_metrics()

    def set_slow_query_threshold(self, threshold_seconds: float) -> None:
        """
        Set the threshold for slow query detection.

        Args:
            threshold_seconds: Threshold in seconds
        """
        self._slow_query_threshold = max(0.001, threshold_seconds)  # Minimum 1ms

    def _log_connection_error(self, error_msg: str) -> None:
        """Log connection error with timestamp."""
        error_entry = {
            'timestamp': datetime.now().isoformat(),
            'error': error_msg
        }

        self._connection_errors.append(error_entry)

        # Keep only recent errors
        if len(self._connection_errors) > self._max_error_history:
            self._connection_errors = self._connection_errors[-self._max_error_history:]

        self.logger.error(error_msg)

    def close_connection(self, thread_id: Optional[int] = None) -> None:
        """
        Close database connection for specific thread or current thread.

        Args:
            thread_id: Thread ID to close connection for (None for current thread)
        """
        target_thread = thread_id or threading.get_ident()

        with self._lock:
            if target_thread in self._connections:
                try:
                    self._connections[target_thread].close()
                    del self._connections[target_thread]
                    self.logger.debug(f"Closed connection for thread {target_thread}")
                except Exception as e:
                    self.logger.warning(f"Error closing connection for thread {target_thread}: {e}")

    def close_all_connections(self) -> None:
        """Close all database connections and stop background threads."""
        # Stop backup thread
        if self._backup_thread and self._backup_thread.is_alive():
            self._backup_stop_event.set()
            self._backup_thread.join(timeout=5)

        with self._lock:
            # Close all thread connections
            for thread_id in list(self._connections.keys()):
                self.close_connection(thread_id)

            # Close main connection
            if self._main_connection:
                try:
                    self._main_connection.close()
                    self._main_connection = None
                    self.logger.info("All database connections closed")
                except Exception as e:
                    self.logger.warning(f"Error closing main connection: {e}")

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - close all connections."""
        self.close_all_connections()


# Connection pool for shared access across modules
class ConnectionPool:
    """
    Singleton connection pool for shared database access across the application.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
                    cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        if not getattr(self, '_initialized', False):
            self._manager = None
            self._initialized = True

    def initialize(self, db_path: str = ":memory:", backup_path: Optional[str] = None) -> None:
        """
        Initialize the connection pool with database settings.

        Args:
            db_path: Path to SQLite database file
            backup_path: Path for automatic backups
        """
        if self._manager:
            self._manager.close_all_connections()

        self._manager = DatabaseConnectionManager(db_path, backup_path)

    def get_manager(self) -> DatabaseConnectionManager:
        """
        Get the connection manager instance.

        Returns:
            DatabaseConnectionManager instance

        Raises:
            RuntimeError: If pool not initialized
        """
        if not self._manager:
            raise RuntimeError("Connection pool not initialized. Call initialize() first.")
        return self._manager

    def close(self) -> None:
        """Close the connection pool."""
        if self._manager:
            self._manager.close_all_connections()
            self._manager = None
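
For orientation, the sketch below shows one way the DatabaseConnectionManager and ConnectionPool classes listed above could be exercised. It is not part of the package: the import assumes the package's core/ directory is importable as the core package, and the database file name, backup directory, and table schema are illustrative assumptions.

# Illustrative sketch only; assumes the package root (containing core/) is on sys.path.
from core.database_connection_manager import DatabaseConnectionManager, ConnectionPool

# File-backed manager: WAL mode is enabled and a daemon thread backs the database
# up on the default 5-minute / 100-change schedule (paths here are hypothetical).
manager = DatabaseConnectionManager(
    db_path="settings.db",
    backup_path="backups/settings_backup.db",
)

# transaction() wraps the statements in BEGIN/COMMIT and rolls back on error.
with manager.transaction() as conn:
    conn.execute(
        "CREATE TABLE IF NOT EXISTS core_settings (key TEXT PRIMARY KEY, value TEXT)"
    )
    conn.execute(
        "INSERT OR REPLACE INTO core_settings (key, value) VALUES (?, ?)",
        ("theme", "dark"),
    )

# Reads go through the per-thread connection returned by get_connection().
row = manager.get_connection().execute(
    "SELECT value FROM core_settings WHERE key = ?", ("theme",)
).fetchone()
print(row)  # ('dark',)

manager.backup_to_disk()  # force an immediate backup alongside the scheduled ones
print(manager.get_performance_stats()["total_queries"])
manager.close_all_connections()

# Alternatively, share a single manager application-wide via the singleton pool.
pool = ConnectionPool()
pool.initialize(db_path=":memory:")
shared = pool.get_manager()
with shared.transaction() as conn:
    conn.execute("CREATE TABLE notes (id INTEGER PRIMARY KEY, body TEXT)")
pool.close()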