pomera-ai-commander 0.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1033 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_validator.py +1066 -1066
  12. package/core/database_connection_manager.py +744 -744
  13. package/core/database_curl_settings_manager.py +608 -608
  14. package/core/database_promera_ai_settings_manager.py +446 -446
  15. package/core/database_schema.py +411 -411
  16. package/core/database_schema_manager.py +395 -395
  17. package/core/database_settings_manager.py +1507 -1507
  18. package/core/database_settings_manager_interface.py +456 -456
  19. package/core/dialog_manager.py +734 -734
  20. package/core/efficient_line_numbers.py +510 -510
  21. package/core/error_handler.py +746 -746
  22. package/core/error_service.py +431 -431
  23. package/core/event_consolidator.py +511 -511
  24. package/core/mcp/__init__.py +43 -43
  25. package/core/mcp/protocol.py +288 -288
  26. package/core/mcp/schema.py +251 -251
  27. package/core/mcp/server_stdio.py +299 -299
  28. package/core/mcp/tool_registry.py +2372 -2345
  29. package/core/memory_efficient_text_widget.py +711 -711
  30. package/core/migration_manager.py +914 -914
  31. package/core/migration_test_suite.py +1085 -1085
  32. package/core/migration_validator.py +1143 -1143
  33. package/core/optimized_find_replace.py +714 -714
  34. package/core/optimized_pattern_engine.py +424 -424
  35. package/core/optimized_search_highlighter.py +552 -552
  36. package/core/performance_monitor.py +674 -674
  37. package/core/persistence_manager.py +712 -712
  38. package/core/progressive_stats_calculator.py +632 -632
  39. package/core/regex_pattern_cache.py +529 -529
  40. package/core/regex_pattern_library.py +350 -350
  41. package/core/search_operation_manager.py +434 -434
  42. package/core/settings_defaults_registry.py +1087 -1087
  43. package/core/settings_integrity_validator.py +1111 -1111
  44. package/core/settings_serializer.py +557 -557
  45. package/core/settings_validator.py +1823 -1823
  46. package/core/smart_stats_calculator.py +709 -709
  47. package/core/statistics_update_manager.py +619 -619
  48. package/core/stats_config_manager.py +858 -858
  49. package/core/streaming_text_handler.py +723 -723
  50. package/core/task_scheduler.py +596 -596
  51. package/core/update_pattern_library.py +168 -168
  52. package/core/visibility_monitor.py +596 -596
  53. package/core/widget_cache.py +498 -498
  54. package/mcp.json +51 -61
  55. package/package.json +61 -57
  56. package/pomera.py +7482 -7482
  57. package/pomera_mcp_server.py +183 -144
  58. package/requirements.txt +32 -0
  59. package/tools/__init__.py +4 -4
  60. package/tools/ai_tools.py +2891 -2891
  61. package/tools/ascii_art_generator.py +352 -352
  62. package/tools/base64_tools.py +183 -183
  63. package/tools/base_tool.py +511 -511
  64. package/tools/case_tool.py +308 -308
  65. package/tools/column_tools.py +395 -395
  66. package/tools/cron_tool.py +884 -884
  67. package/tools/curl_history.py +600 -600
  68. package/tools/curl_processor.py +1207 -1207
  69. package/tools/curl_settings.py +502 -502
  70. package/tools/curl_tool.py +5467 -5467
  71. package/tools/diff_viewer.py +1071 -1071
  72. package/tools/email_extraction_tool.py +248 -248
  73. package/tools/email_header_analyzer.py +425 -425
  74. package/tools/extraction_tools.py +250 -250
  75. package/tools/find_replace.py +1750 -1750
  76. package/tools/folder_file_reporter.py +1463 -1463
  77. package/tools/folder_file_reporter_adapter.py +480 -480
  78. package/tools/generator_tools.py +1216 -1216
  79. package/tools/hash_generator.py +255 -255
  80. package/tools/html_tool.py +656 -656
  81. package/tools/jsonxml_tool.py +729 -729
  82. package/tools/line_tools.py +419 -419
  83. package/tools/markdown_tools.py +561 -561
  84. package/tools/mcp_widget.py +1417 -1417
  85. package/tools/notes_widget.py +973 -973
  86. package/tools/number_base_converter.py +372 -372
  87. package/tools/regex_extractor.py +571 -571
  88. package/tools/slug_generator.py +310 -310
  89. package/tools/sorter_tools.py +458 -458
  90. package/tools/string_escape_tool.py +392 -392
  91. package/tools/text_statistics_tool.py +365 -365
  92. package/tools/text_wrapper.py +430 -430
  93. package/tools/timestamp_converter.py +421 -421
  94. package/tools/tool_loader.py +710 -710
  95. package/tools/translator_tools.py +522 -522
  96. package/tools/url_link_extractor.py +261 -261
  97. package/tools/url_parser.py +204 -204
  98. package/tools/whitespace_tools.py +355 -355
  99. package/tools/word_frequency_counter.py +146 -146
  100. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  102. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  103. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  104. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  105. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  106. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  107. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  108. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  109. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  110. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  111. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  112. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  113. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  114. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  115. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  116. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  117. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  118. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  119. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  120. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  121. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  122. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  123. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  124. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  125. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  126. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  127. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  128. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  129. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  131. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  132. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  134. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  135. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  136. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  137. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  138. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  139. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  140. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  141. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  142. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  143. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  144. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  145. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  146. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  147. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  148. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  151. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  152. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  153. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  154. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  155. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  156. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  157. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  158. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  159. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  160. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  161. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  162. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  163. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  164. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  165. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  166. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  167. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  168. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  169. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  170. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
package/core/performance_monitor.py
@@ -1,675 +1,675 @@
-"""
-Performance Monitor for Database Settings System
-
-This module provides comprehensive performance monitoring and optimization
-for the database settings system, including query performance tracking,
-caching layer, memory usage monitoring, and automatic optimization.
-"""
-
-import time
-import threading
-import sqlite3
-import logging
-import statistics
-from typing import Dict, List, Any, Optional, Callable, Tuple
-from datetime import datetime, timedelta
-from collections import defaultdict, deque
-from dataclasses import dataclass, field
-from contextlib import contextmanager
-import psutil
-import os
-
-
-@dataclass
-class QueryMetrics:
-    """Metrics for a database query."""
-    query_hash: str
-    query_text: str
-    execution_time: float
-    timestamp: datetime
-    thread_id: int
-    result_count: int = 0
-    cache_hit: bool = False
-
-
-@dataclass
-class PerformanceStats:
-    """Aggregated performance statistics."""
-    total_queries: int = 0
-    avg_execution_time: float = 0.0
-    max_execution_time: float = 0.0
-    min_execution_time: float = float('inf')
-    cache_hit_rate: float = 0.0
-    queries_per_second: float = 0.0
-    memory_usage_mb: float = 0.0
-    active_connections: int = 0
-    slow_queries: List[QueryMetrics] = field(default_factory=list)
-
-
-class QueryCache:
-    """
-    LRU cache for database query results with TTL support.
-    """
-
-    def __init__(self, max_size: int = 1000, ttl_seconds: int = 300):
-        """
-        Initialize query cache.
-
-        Args:
-            max_size: Maximum number of cached queries
-            ttl_seconds: Time-to-live for cached results in seconds
-        """
-        self.max_size = max_size
-        self.ttl_seconds = ttl_seconds
-        self._cache = {}
-        self._access_order = deque()
-        self._lock = threading.RLock()
-
-        # Cache statistics
-        self.hits = 0
-        self.misses = 0
-        self.evictions = 0
-
-    def get(self, query_hash: str) -> Optional[Any]:
-        """
-        Get cached query result.
-
-        Args:
-            query_hash: Hash of the query
-
-        Returns:
-            Cached result or None if not found/expired
-        """
-        with self._lock:
-            if query_hash not in self._cache:
-                self.misses += 1
-                return None
-
-            entry = self._cache[query_hash]
-
-            # Check TTL
-            if datetime.now() - entry['timestamp'] > timedelta(seconds=self.ttl_seconds):
-                del self._cache[query_hash]
-                self._access_order.remove(query_hash)
-                self.misses += 1
-                return None
-
-            # Update access order
-            self._access_order.remove(query_hash)
-            self._access_order.append(query_hash)
-
-            self.hits += 1
-            return entry['result']
-
-    def put(self, query_hash: str, result: Any) -> None:
-        """
-        Cache query result.
-
-        Args:
-            query_hash: Hash of the query
-            result: Query result to cache
-        """
-        with self._lock:
-            # Remove if already exists
-            if query_hash in self._cache:
-                self._access_order.remove(query_hash)
-
-            # Evict oldest if at capacity
-            elif len(self._cache) >= self.max_size:
-                oldest = self._access_order.popleft()
-                del self._cache[oldest]
-                self.evictions += 1
-
-            # Add new entry
-            self._cache[query_hash] = {
-                'result': result,
-                'timestamp': datetime.now()
-            }
-            self._access_order.append(query_hash)
-
-    def clear(self) -> None:
-        """Clear all cached entries."""
-        with self._lock:
-            self._cache.clear()
-            self._access_order.clear()
-
-    def get_stats(self) -> Dict[str, Any]:
-        """Get cache statistics."""
-        with self._lock:
-            total_requests = self.hits + self.misses
-            hit_rate = (self.hits / total_requests * 100) if total_requests > 0 else 0
-
-            return {
-                'size': len(self._cache),
-                'max_size': self.max_size,
-                'hits': self.hits,
-                'misses': self.misses,
-                'evictions': self.evictions,
-                'hit_rate_percent': hit_rate,
-                'ttl_seconds': self.ttl_seconds
-            }
-
-
-class ConnectionPool:
-    """
-    Connection pool with performance monitoring.
-    """
-
-    def __init__(self, db_path: str, max_connections: int = 10):
-        """
-        Initialize connection pool.
-
-        Args:
-            db_path: Database file path
-            max_connections: Maximum number of pooled connections
-        """
-        self.db_path = db_path
-        self.max_connections = max_connections
-        self._pool = deque()
-        self._active_connections = set()
-        self._lock = threading.RLock()
-        self._created_count = 0
-        self._borrowed_count = 0
-        self._returned_count = 0
-
-    @contextmanager
-    def get_connection(self):
-        """
-        Get a connection from the pool.
-
-        Yields:
-            SQLite connection
-        """
-        conn = self._borrow_connection()
-        try:
-            yield conn
-        finally:
-            self._return_connection(conn)
-
-    def _borrow_connection(self) -> sqlite3.Connection:
-        """Borrow a connection from the pool."""
-        with self._lock:
-            if self._pool:
-                conn = self._pool.popleft()
-            else:
-                conn = self._create_connection()
-
-            self._active_connections.add(conn)
-            self._borrowed_count += 1
-            return conn
-
-    def _return_connection(self, conn: sqlite3.Connection) -> None:
-        """Return a connection to the pool."""
-        with self._lock:
-            if conn in self._active_connections:
-                self._active_connections.remove(conn)
-
-            if len(self._pool) < self.max_connections:
-                self._pool.append(conn)
-                self._returned_count += 1
-            else:
-                conn.close()
-
-    def _create_connection(self) -> sqlite3.Connection:
-        """Create a new database connection."""
-        conn = sqlite3.connect(
-            self.db_path,
-            timeout=30.0,
-            isolation_level=None,
-            check_same_thread=False
-        )
-
-        # Configure performance settings
-        conn.execute("PRAGMA journal_mode=WAL")
-        conn.execute("PRAGMA synchronous=NORMAL")
-        conn.execute("PRAGMA cache_size=10000")
-        conn.execute("PRAGMA temp_store=MEMORY")
-        conn.execute("PRAGMA mmap_size=268435456")
-        conn.execute("PRAGMA foreign_keys=ON")
-
-        self._created_count += 1
-        return conn
-
-    def get_stats(self) -> Dict[str, Any]:
-        """Get connection pool statistics."""
-        with self._lock:
-            return {
-                'pool_size': len(self._pool),
-                'active_connections': len(self._active_connections),
-                'max_connections': self.max_connections,
-                'created_count': self._created_count,
-                'borrowed_count': self._borrowed_count,
-                'returned_count': self._returned_count
-            }
-
-    def close_all(self) -> None:
-        """Close all connections in the pool."""
-        with self._lock:
-            # Close pooled connections
-            while self._pool:
-                conn = self._pool.popleft()
-                conn.close()
-
-            # Close active connections
-            for conn in list(self._active_connections):
-                conn.close()
-            self._active_connections.clear()
-
-
-class PerformanceMonitor:
-    """
-    Comprehensive performance monitoring system for database settings.
-    """
-
-    def __init__(self, enable_caching: bool = True, cache_size: int = 1000,
-                 slow_query_threshold: float = 0.1, max_metrics_history: int = 10000):
-        """
-        Initialize performance monitor.
-
-        Args:
-            enable_caching: Whether to enable query result caching
-            cache_size: Maximum number of cached queries
-            slow_query_threshold: Threshold in seconds for slow query detection
-            max_metrics_history: Maximum number of query metrics to keep
-        """
-        self.enable_caching = enable_caching
-        self.slow_query_threshold = slow_query_threshold
-        self.max_metrics_history = max_metrics_history
-
-        # Query cache
-        self.query_cache = QueryCache(max_size=cache_size) if enable_caching else None
-
-        # Metrics storage
-        self.query_metrics = deque(maxlen=max_metrics_history)
-        self.query_stats = defaultdict(list)  # query_hash -> [execution_times]
-
-        # Performance tracking
-        self._lock = threading.RLock()
-        self.start_time = datetime.now()
-        self.logger = logging.getLogger(__name__)
-
-        # Hot settings tracking
-        self.hot_settings = defaultdict(int)  # setting_key -> access_count
-        self.hot_queries = defaultdict(int)  # query_hash -> execution_count
-
-        # Memory monitoring
-        self.process = psutil.Process(os.getpid())
-        self.memory_samples = deque(maxlen=100)
-
-        # Connection pool (optional)
-        self.connection_pool = None
-
-    def set_connection_pool(self, pool: ConnectionPool) -> None:
-        """Set connection pool for monitoring."""
-        self.connection_pool = pool
-
-    def _hash_query(self, query: str, params: Tuple = ()) -> str:
-        """Generate hash for query and parameters."""
-        import hashlib
-        query_str = f"{query}:{str(params)}"
-        return hashlib.md5(query_str.encode()).hexdigest()
-
-    @contextmanager
-    def monitor_query(self, query: str, params: Tuple = ()):
-        """
-        Context manager for monitoring query execution.
-
-        Args:
-            query: SQL query string
-            params: Query parameters
-
-        Yields:
-            Tuple of (connection, cached_result_if_available)
-        """
-        query_hash = self._hash_query(query, params)
-        start_time = time.time()
-        thread_id = threading.get_ident()
-
-        # Check cache first
-        cached_result = None
-        if self.query_cache:
-            cached_result = self.query_cache.get(query_hash)
-            if cached_result is not None:
-                # Record cache hit
-                execution_time = time.time() - start_time
-                self._record_query_metric(
-                    query_hash, query, execution_time, thread_id,
-                    len(cached_result) if isinstance(cached_result, (list, tuple)) else 1,
-                    cache_hit=True
-                )
-                yield None, cached_result
-                return
-
-        # Execute query
-        connection = None
-        try:
-            if self.connection_pool:
-                with self.connection_pool.get_connection() as conn:
-                    connection = conn
-                    yield conn, None
-            else:
-                # Caller provides connection
-                yield None, None
-        finally:
-            # Record metrics
-            execution_time = time.time() - start_time
-            self._record_query_metric(
-                query_hash, query, execution_time, thread_id, 0, cache_hit=False
-            )
-
-    def cache_query_result(self, query: str, params: Tuple, result: Any) -> None:
-        """
-        Cache query result if caching is enabled.
-
-        Args:
-            query: SQL query string
-            params: Query parameters
-            result: Query result to cache
-        """
-        if self.query_cache:
-            query_hash = self._hash_query(query, params)
-            self.query_cache.put(query_hash, result)
-
-    def _record_query_metric(self, query_hash: str, query: str, execution_time: float,
-                             thread_id: int, result_count: int, cache_hit: bool = False) -> None:
-        """Record query execution metrics."""
-        with self._lock:
-            metric = QueryMetrics(
-                query_hash=query_hash,
-                query_text=query,
-                execution_time=execution_time,
-                timestamp=datetime.now(),
-                thread_id=thread_id,
-                result_count=result_count,
-                cache_hit=cache_hit
-            )
-
-            self.query_metrics.append(metric)
-            self.query_stats[query_hash].append(execution_time)
-            self.hot_queries[query_hash] += 1
-
-            # Log slow queries
-            if execution_time > self.slow_query_threshold and not cache_hit:
-                self.logger.warning(
-                    f"Slow query detected: {execution_time:.3f}s - {query[:100]}..."
-                )
-
-    def record_setting_access(self, setting_key: str) -> None:
-        """
-        Record access to a specific setting for hot data tracking.
-
-        Args:
-            setting_key: Setting key that was accessed
-        """
-        with self._lock:
-            self.hot_settings[setting_key] += 1
-
-    def get_performance_stats(self, window_minutes: int = 60) -> PerformanceStats:
-        """
-        Get aggregated performance statistics.
-
-        Args:
-            window_minutes: Time window for statistics in minutes
-
-        Returns:
-            PerformanceStats object with aggregated metrics
-        """
-        with self._lock:
-            cutoff_time = datetime.now() - timedelta(minutes=window_minutes)
-
-            # Filter metrics to time window
-            recent_metrics = [
-                m for m in self.query_metrics
-                if m.timestamp >= cutoff_time
-            ]
-
-            if not recent_metrics:
-                return PerformanceStats()
-
-            # Calculate statistics
-            execution_times = [m.execution_time for m in recent_metrics if not m.cache_hit]
-            cache_hits = sum(1 for m in recent_metrics if m.cache_hit)
-
-            stats = PerformanceStats(
-                total_queries=len(recent_metrics),
-                cache_hit_rate=(cache_hits / len(recent_metrics) * 100) if recent_metrics else 0,
-                queries_per_second=len(recent_metrics) / (window_minutes * 60),
-                memory_usage_mb=self._get_memory_usage_mb()
-            )
-
-            if execution_times:
-                stats.avg_execution_time = statistics.mean(execution_times)
-                stats.max_execution_time = max(execution_times)
-                stats.min_execution_time = min(execution_times)
-
-            # Get slow queries
-            stats.slow_queries = [
-                m for m in recent_metrics
-                if m.execution_time > self.slow_query_threshold and not m.cache_hit
-            ]
-
-            # Connection pool stats
-            if self.connection_pool:
-                pool_stats = self.connection_pool.get_stats()
-                stats.active_connections = pool_stats['active_connections']
-
-            return stats
-
-    def get_hot_settings(self, top_n: int = 10) -> List[Tuple[str, int]]:
-        """
-        Get most frequently accessed settings.
-
-        Args:
-            top_n: Number of top settings to return
-
-        Returns:
-            List of (setting_key, access_count) tuples
-        """
-        with self._lock:
-            return sorted(
-                self.hot_settings.items(),
-                key=lambda x: x[1],
-                reverse=True
-            )[:top_n]
-
-    def get_hot_queries(self, top_n: int = 10) -> List[Tuple[str, int, float]]:
-        """
-        Get most frequently executed queries with average execution time.
-
-        Args:
-            top_n: Number of top queries to return
-
-        Returns:
-            List of (query_hash, execution_count, avg_time) tuples
-        """
-        with self._lock:
-            hot_queries = []
-            for query_hash, count in self.hot_queries.items():
-                if query_hash in self.query_stats:
-                    avg_time = statistics.mean(self.query_stats[query_hash])
-                    hot_queries.append((query_hash, count, avg_time))
-
-            return sorted(hot_queries, key=lambda x: x[1], reverse=True)[:top_n]
-
-    def _get_memory_usage_mb(self) -> float:
-        """Get current memory usage in MB."""
-        try:
-            memory_info = self.process.memory_info()
-            memory_mb = memory_info.rss / 1024 / 1024
-            self.memory_samples.append(memory_mb)
-            return memory_mb
-        except Exception:
-            return 0.0
-
-    def get_memory_trend(self) -> Dict[str, float]:
-        """Get memory usage trend statistics."""
-        if not self.memory_samples:
-            return {'current': 0.0, 'average': 0.0, 'peak': 0.0}
-
-        return {
-            'current': self.memory_samples[-1],
-            'average': statistics.mean(self.memory_samples),
-            'peak': max(self.memory_samples)
-        }
-
-    def optimize_indexes(self, connection_manager) -> List[str]:
-        """
-        Analyze query patterns and suggest index optimizations.
-
-        Args:
-            connection_manager: Database connection manager
-
-        Returns:
-            List of suggested index creation SQL statements
-        """
-        suggestions = []
-
-        # Analyze hot queries for index opportunities
-        hot_queries = self.get_hot_queries(20)
-
-        for query_hash, count, avg_time in hot_queries:
-            # Find the actual query
-            query_text = None
-            for metric in self.query_metrics:
-                if metric.query_hash == query_hash:
-                    query_text = metric.query_text
-                    break
-
-            if not query_text:
-                continue
-
-            # Analyze query for index opportunities
-            query_lower = query_text.lower()
-
-            # Tool settings queries
-            if 'tool_settings' in query_lower and 'where tool_name' in query_lower:
-                if count > 10 and avg_time > 0.01:
-                    suggestions.append(
-                        "CREATE INDEX IF NOT EXISTS idx_tool_settings_tool_name_path "
-                        "ON tool_settings(tool_name, setting_path)"
-                    )
-
-            # Core settings queries
-            if 'core_settings' in query_lower and 'where key' in query_lower:
-                if count > 10 and avg_time > 0.01:
-                    suggestions.append(
-                        "CREATE INDEX IF NOT EXISTS idx_core_settings_key "
-                        "ON core_settings(key)"
-                    )
-
-            # Performance settings queries
-            if 'performance_settings' in query_lower and 'where category' in query_lower:
-                if count > 5 and avg_time > 0.01:
-                    suggestions.append(
-                        "CREATE INDEX IF NOT EXISTS idx_performance_settings_category "
-                        "ON performance_settings(category, setting_key)"
-                    )
-
-        return list(set(suggestions))  # Remove duplicates
-
-    def clear_cache(self) -> None:
-        """Clear query cache."""
-        if self.query_cache:
-            self.query_cache.clear()
-
-    def reset_metrics(self) -> None:
-        """Reset all performance metrics."""
-        with self._lock:
-            self.query_metrics.clear()
-            self.query_stats.clear()
-            self.hot_settings.clear()
-            self.hot_queries.clear()
-            self.memory_samples.clear()
-            self.start_time = datetime.now()
-
-    def get_cache_stats(self) -> Dict[str, Any]:
-        """Get query cache statistics."""
-        if self.query_cache:
-            return self.query_cache.get_stats()
-        return {'enabled': False}
-
-    def export_metrics(self, filepath: str) -> bool:
-        """
-        Export performance metrics to JSON file.
-
-        Args:
-            filepath: Target file path
-
-        Returns:
-            True if export successful
-        """
-        try:
-            import json
-
-            with self._lock:
-                metrics_data = {
-                    'export_timestamp': datetime.now().isoformat(),
-                    'monitoring_duration_minutes': (datetime.now() - self.start_time).total_seconds() / 60,
-                    'performance_stats': self.get_performance_stats().__dict__,
-                    'hot_settings': dict(self.hot_settings),
-                    'hot_queries': dict(self.hot_queries),
-                    'cache_stats': self.get_cache_stats(),
-                    'memory_trend': self.get_memory_trend(),
-                    'recent_slow_queries': [
-                        {
-                            'query': m.query_text[:200],
-                            'execution_time': m.execution_time,
-                            'timestamp': m.timestamp.isoformat()
-                        }
-                        for m in self.query_metrics
-                        if m.execution_time > self.slow_query_threshold
-                    ][-20:]  # Last 20 slow queries
-                }
-
-            with open(filepath, 'w') as f:
-                json.dump(metrics_data, f, indent=2, default=str)
-
-            self.logger.info(f"Performance metrics exported to {filepath}")
-            return True
-
-        except Exception as e:
-            self.logger.error(f"Failed to export metrics: {e}")
-            return False
-
-
-# Global performance monitor instance
-_performance_monitor = None
-_monitor_lock = threading.Lock()
-
-
-def get_performance_monitor() -> PerformanceMonitor:
-    """Get the global performance monitor instance."""
-    global _performance_monitor
-
-    if _performance_monitor is None:
-        with _monitor_lock:
-            if _performance_monitor is None:
-                _performance_monitor = PerformanceMonitor()
-
-    return _performance_monitor
-
-
-def initialize_performance_monitoring(enable_caching: bool = True,
-                                      cache_size: int = 1000,
-                                      slow_query_threshold: float = 0.1) -> PerformanceMonitor:
-    """
-    Initialize global performance monitoring.
-
-    Args:
-        enable_caching: Whether to enable query result caching
-        cache_size: Maximum number of cached queries
-        slow_query_threshold: Threshold in seconds for slow query detection
-
-    Returns:
-        PerformanceMonitor instance
-    """
-    global _performance_monitor
-
-    with _monitor_lock:
-        _performance_monitor = PerformanceMonitor(
-            enable_caching=enable_caching,
-            cache_size=cache_size,
-            slow_query_threshold=slow_query_threshold
-        )
-
+"""
+Performance Monitor for Database Settings System
+
+This module provides comprehensive performance monitoring and optimization
+for the database settings system, including query performance tracking,
+caching layer, memory usage monitoring, and automatic optimization.
+"""
+
+import time
+import threading
+import sqlite3
+import logging
+import statistics
+from typing import Dict, List, Any, Optional, Callable, Tuple
+from datetime import datetime, timedelta
+from collections import defaultdict, deque
+from dataclasses import dataclass, field
+from contextlib import contextmanager
+import psutil
+import os
+
+
+@dataclass
+class QueryMetrics:
+    """Metrics for a database query."""
+    query_hash: str
+    query_text: str
+    execution_time: float
+    timestamp: datetime
+    thread_id: int
+    result_count: int = 0
+    cache_hit: bool = False
+
+
+@dataclass
+class PerformanceStats:
+    """Aggregated performance statistics."""
+    total_queries: int = 0
+    avg_execution_time: float = 0.0
+    max_execution_time: float = 0.0
+    min_execution_time: float = float('inf')
+    cache_hit_rate: float = 0.0
+    queries_per_second: float = 0.0
+    memory_usage_mb: float = 0.0
+    active_connections: int = 0
+    slow_queries: List[QueryMetrics] = field(default_factory=list)
+
+
+class QueryCache:
+    """
+    LRU cache for database query results with TTL support.
+    """
+
+    def __init__(self, max_size: int = 1000, ttl_seconds: int = 300):
+        """
+        Initialize query cache.
+
+        Args:
+            max_size: Maximum number of cached queries
+            ttl_seconds: Time-to-live for cached results in seconds
+        """
+        self.max_size = max_size
+        self.ttl_seconds = ttl_seconds
+        self._cache = {}
+        self._access_order = deque()
+        self._lock = threading.RLock()
+
+        # Cache statistics
+        self.hits = 0
+        self.misses = 0
+        self.evictions = 0
+
+    def get(self, query_hash: str) -> Optional[Any]:
+        """
+        Get cached query result.
+
+        Args:
+            query_hash: Hash of the query
+
+        Returns:
+            Cached result or None if not found/expired
+        """
+        with self._lock:
+            if query_hash not in self._cache:
+                self.misses += 1
+                return None
+
+            entry = self._cache[query_hash]
+
+            # Check TTL
+            if datetime.now() - entry['timestamp'] > timedelta(seconds=self.ttl_seconds):
+                del self._cache[query_hash]
+                self._access_order.remove(query_hash)
+                self.misses += 1
+                return None
+
+            # Update access order
+            self._access_order.remove(query_hash)
+            self._access_order.append(query_hash)
+
+            self.hits += 1
+            return entry['result']
+
+    def put(self, query_hash: str, result: Any) -> None:
+        """
+        Cache query result.
+
+        Args:
+            query_hash: Hash of the query
+            result: Query result to cache
+        """
+        with self._lock:
+            # Remove if already exists
+            if query_hash in self._cache:
+                self._access_order.remove(query_hash)
+
+            # Evict oldest if at capacity
+            elif len(self._cache) >= self.max_size:
+                oldest = self._access_order.popleft()
+                del self._cache[oldest]
+                self.evictions += 1
+
+            # Add new entry
+            self._cache[query_hash] = {
+                'result': result,
+                'timestamp': datetime.now()
+            }
+            self._access_order.append(query_hash)
+
+    def clear(self) -> None:
+        """Clear all cached entries."""
+        with self._lock:
+            self._cache.clear()
+            self._access_order.clear()
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get cache statistics."""
+        with self._lock:
+            total_requests = self.hits + self.misses
+            hit_rate = (self.hits / total_requests * 100) if total_requests > 0 else 0
+
+            return {
+                'size': len(self._cache),
+                'max_size': self.max_size,
+                'hits': self.hits,
+                'misses': self.misses,
+                'evictions': self.evictions,
+                'hit_rate_percent': hit_rate,
+                'ttl_seconds': self.ttl_seconds
+            }
+
+
+class ConnectionPool:
+    """
+    Connection pool with performance monitoring.
+    """
+
+    def __init__(self, db_path: str, max_connections: int = 10):
+        """
+        Initialize connection pool.
+
+        Args:
+            db_path: Database file path
+            max_connections: Maximum number of pooled connections
+        """
+        self.db_path = db_path
+        self.max_connections = max_connections
+        self._pool = deque()
+        self._active_connections = set()
+        self._lock = threading.RLock()
+        self._created_count = 0
+        self._borrowed_count = 0
+        self._returned_count = 0
+
+    @contextmanager
+    def get_connection(self):
+        """
+        Get a connection from the pool.
+
+        Yields:
+            SQLite connection
+        """
+        conn = self._borrow_connection()
+        try:
+            yield conn
+        finally:
+            self._return_connection(conn)
+
+    def _borrow_connection(self) -> sqlite3.Connection:
+        """Borrow a connection from the pool."""
+        with self._lock:
+            if self._pool:
+                conn = self._pool.popleft()
+            else:
+                conn = self._create_connection()
+
+            self._active_connections.add(conn)
+            self._borrowed_count += 1
+            return conn
+
+    def _return_connection(self, conn: sqlite3.Connection) -> None:
+        """Return a connection to the pool."""
+        with self._lock:
+            if conn in self._active_connections:
+                self._active_connections.remove(conn)
+
+            if len(self._pool) < self.max_connections:
+                self._pool.append(conn)
+                self._returned_count += 1
+            else:
+                conn.close()
+
+    def _create_connection(self) -> sqlite3.Connection:
+        """Create a new database connection."""
+        conn = sqlite3.connect(
+            self.db_path,
+            timeout=30.0,
+            isolation_level=None,
+            check_same_thread=False
+        )
+
+        # Configure performance settings
+        conn.execute("PRAGMA journal_mode=WAL")
+        conn.execute("PRAGMA synchronous=NORMAL")
+        conn.execute("PRAGMA cache_size=10000")
+        conn.execute("PRAGMA temp_store=MEMORY")
+        conn.execute("PRAGMA mmap_size=268435456")
+        conn.execute("PRAGMA foreign_keys=ON")
+
+        self._created_count += 1
+        return conn
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get connection pool statistics."""
+        with self._lock:
+            return {
+                'pool_size': len(self._pool),
+                'active_connections': len(self._active_connections),
+                'max_connections': self.max_connections,
+                'created_count': self._created_count,
+                'borrowed_count': self._borrowed_count,
+                'returned_count': self._returned_count
+            }
+
+    def close_all(self) -> None:
+        """Close all connections in the pool."""
+        with self._lock:
+            # Close pooled connections
+            while self._pool:
+                conn = self._pool.popleft()
+                conn.close()
+
+            # Close active connections
+            for conn in list(self._active_connections):
+                conn.close()
+            self._active_connections.clear()
+
+
+class PerformanceMonitor:
+    """
+    Comprehensive performance monitoring system for database settings.
+    """
+
+    def __init__(self, enable_caching: bool = True, cache_size: int = 1000,
+                 slow_query_threshold: float = 0.1, max_metrics_history: int = 10000):
+        """
+        Initialize performance monitor.
+
+        Args:
+            enable_caching: Whether to enable query result caching
+            cache_size: Maximum number of cached queries
+            slow_query_threshold: Threshold in seconds for slow query detection
+            max_metrics_history: Maximum number of query metrics to keep
+        """
+        self.enable_caching = enable_caching
+        self.slow_query_threshold = slow_query_threshold
+        self.max_metrics_history = max_metrics_history
+
+        # Query cache
+        self.query_cache = QueryCache(max_size=cache_size) if enable_caching else None
+
+        # Metrics storage
+        self.query_metrics = deque(maxlen=max_metrics_history)
+        self.query_stats = defaultdict(list)  # query_hash -> [execution_times]
+
+        # Performance tracking
+        self._lock = threading.RLock()
+        self.start_time = datetime.now()
+        self.logger = logging.getLogger(__name__)
+
+        # Hot settings tracking
+        self.hot_settings = defaultdict(int)  # setting_key -> access_count
+        self.hot_queries = defaultdict(int)  # query_hash -> execution_count
+
+        # Memory monitoring
+        self.process = psutil.Process(os.getpid())
+        self.memory_samples = deque(maxlen=100)
+
+        # Connection pool (optional)
+        self.connection_pool = None
+
+    def set_connection_pool(self, pool: ConnectionPool) -> None:
+        """Set connection pool for monitoring."""
+        self.connection_pool = pool
+
+    def _hash_query(self, query: str, params: Tuple = ()) -> str:
+        """Generate hash for query and parameters."""
+        import hashlib
+        query_str = f"{query}:{str(params)}"
+        return hashlib.md5(query_str.encode()).hexdigest()
+
+    @contextmanager
+    def monitor_query(self, query: str, params: Tuple = ()):
+        """
+        Context manager for monitoring query execution.
+
+        Args:
+            query: SQL query string
+            params: Query parameters
+
+        Yields:
+            Tuple of (connection, cached_result_if_available)
+        """
+        query_hash = self._hash_query(query, params)
+        start_time = time.time()
+        thread_id = threading.get_ident()
+
+        # Check cache first
+        cached_result = None
+        if self.query_cache:
+            cached_result = self.query_cache.get(query_hash)
+            if cached_result is not None:
+                # Record cache hit
+                execution_time = time.time() - start_time
+                self._record_query_metric(
+                    query_hash, query, execution_time, thread_id,
+                    len(cached_result) if isinstance(cached_result, (list, tuple)) else 1,
+                    cache_hit=True
+                )
+                yield None, cached_result
+                return
+
+        # Execute query
+        connection = None
+        try:
+            if self.connection_pool:
+                with self.connection_pool.get_connection() as conn:
+                    connection = conn
+                    yield conn, None
+            else:
+                # Caller provides connection
+                yield None, None
+        finally:
+            # Record metrics
+            execution_time = time.time() - start_time
+            self._record_query_metric(
+                query_hash, query, execution_time, thread_id, 0, cache_hit=False
+            )
+
+    def cache_query_result(self, query: str, params: Tuple, result: Any) -> None:
+        """
+        Cache query result if caching is enabled.
+
+        Args:
+            query: SQL query string
+            params: Query parameters
+            result: Query result to cache
+        """
+        if self.query_cache:
+            query_hash = self._hash_query(query, params)
+            self.query_cache.put(query_hash, result)
+
+    def _record_query_metric(self, query_hash: str, query: str, execution_time: float,
+                             thread_id: int, result_count: int, cache_hit: bool = False) -> None:
+        """Record query execution metrics."""
+        with self._lock:
+            metric = QueryMetrics(
+                query_hash=query_hash,
+                query_text=query,
+                execution_time=execution_time,
+                timestamp=datetime.now(),
+                thread_id=thread_id,
+                result_count=result_count,
+                cache_hit=cache_hit
+            )
+
+            self.query_metrics.append(metric)
+            self.query_stats[query_hash].append(execution_time)
+            self.hot_queries[query_hash] += 1
+
+            # Log slow queries
+            if execution_time > self.slow_query_threshold and not cache_hit:
+                self.logger.warning(
+                    f"Slow query detected: {execution_time:.3f}s - {query[:100]}..."
+                )
+
+    def record_setting_access(self, setting_key: str) -> None:
+        """
+        Record access to a specific setting for hot data tracking.
+
+        Args:
+            setting_key: Setting key that was accessed
+        """
+        with self._lock:
+            self.hot_settings[setting_key] += 1
+
+    def get_performance_stats(self, window_minutes: int = 60) -> PerformanceStats:
+        """
+        Get aggregated performance statistics.
+
+        Args:
+            window_minutes: Time window for statistics in minutes
+
+        Returns:
+            PerformanceStats object with aggregated metrics
+        """
+        with self._lock:
+            cutoff_time = datetime.now() - timedelta(minutes=window_minutes)
+
+            # Filter metrics to time window
+            recent_metrics = [
+                m for m in self.query_metrics
+                if m.timestamp >= cutoff_time
+            ]
+
+            if not recent_metrics:
+                return PerformanceStats()
+
+            # Calculate statistics
+            execution_times = [m.execution_time for m in recent_metrics if not m.cache_hit]
+            cache_hits = sum(1 for m in recent_metrics if m.cache_hit)
+
+            stats = PerformanceStats(
+                total_queries=len(recent_metrics),
+                cache_hit_rate=(cache_hits / len(recent_metrics) * 100) if recent_metrics else 0,
+                queries_per_second=len(recent_metrics) / (window_minutes * 60),
+                memory_usage_mb=self._get_memory_usage_mb()
+            )
+
+            if execution_times:
+                stats.avg_execution_time = statistics.mean(execution_times)
+                stats.max_execution_time = max(execution_times)
+                stats.min_execution_time = min(execution_times)
+
+            # Get slow queries
+            stats.slow_queries = [
+                m for m in recent_metrics
+                if m.execution_time > self.slow_query_threshold and not m.cache_hit
+            ]
+
+            # Connection pool stats
+            if self.connection_pool:
+                pool_stats = self.connection_pool.get_stats()
+                stats.active_connections = pool_stats['active_connections']
+
+            return stats
+
+    def get_hot_settings(self, top_n: int = 10) -> List[Tuple[str, int]]:
+        """
+        Get most frequently accessed settings.
+
+        Args:
+            top_n: Number of top settings to return
+
+        Returns:
+            List of (setting_key, access_count) tuples
+        """
+        with self._lock:
+            return sorted(
+                self.hot_settings.items(),
+                key=lambda x: x[1],
+                reverse=True
+            )[:top_n]
+
+    def get_hot_queries(self, top_n: int = 10) -> List[Tuple[str, int, float]]:
+        """
+        Get most frequently executed queries with average execution time.
+
+        Args:
+            top_n: Number of top queries to return
+
+        Returns:
+            List of (query_hash, execution_count, avg_time) tuples
+        """
+        with self._lock:
+            hot_queries = []
+            for query_hash, count in self.hot_queries.items():
+                if query_hash in self.query_stats:
+                    avg_time = statistics.mean(self.query_stats[query_hash])
+                    hot_queries.append((query_hash, count, avg_time))
+
+            return sorted(hot_queries, key=lambda x: x[1], reverse=True)[:top_n]
+
+    def _get_memory_usage_mb(self) -> float:
+        """Get current memory usage in MB."""
+        try:
+            memory_info = self.process.memory_info()
+            memory_mb = memory_info.rss / 1024 / 1024
+            self.memory_samples.append(memory_mb)
+            return memory_mb
+        except Exception:
+            return 0.0
+
+    def get_memory_trend(self) -> Dict[str, float]:
+        """Get memory usage trend statistics."""
+        if not self.memory_samples:
+            return {'current': 0.0, 'average': 0.0, 'peak': 0.0}
+
+        return {
+            'current': self.memory_samples[-1],
+            'average': statistics.mean(self.memory_samples),
+            'peak': max(self.memory_samples)
+        }
+
+    def optimize_indexes(self, connection_manager) -> List[str]:
+        """
+        Analyze query patterns and suggest index optimizations.
+
+        Args:
+            connection_manager: Database connection manager
+
+        Returns:
+            List of suggested index creation SQL statements
+        """
+        suggestions = []
+
+        # Analyze hot queries for index opportunities
+        hot_queries = self.get_hot_queries(20)
+
+        for query_hash, count, avg_time in hot_queries:
+            # Find the actual query
+            query_text = None
+            for metric in self.query_metrics:
+                if metric.query_hash == query_hash:
+                    query_text = metric.query_text
+                    break
+
+            if not query_text:
+                continue
+
+            # Analyze query for index opportunities
+            query_lower = query_text.lower()
+
+            # Tool settings queries
+            if 'tool_settings' in query_lower and 'where tool_name' in query_lower:
+                if count > 10 and avg_time > 0.01:
+                    suggestions.append(
+                        "CREATE INDEX IF NOT EXISTS idx_tool_settings_tool_name_path "
+                        "ON tool_settings(tool_name, setting_path)"
+                    )
+
+            # Core settings queries
+            if 'core_settings' in query_lower and 'where key' in query_lower:
+                if count > 10 and avg_time > 0.01:
+                    suggestions.append(
+                        "CREATE INDEX IF NOT EXISTS idx_core_settings_key "
+                        "ON core_settings(key)"
+                    )
+
+            # Performance settings queries
+            if 'performance_settings' in query_lower and 'where category' in query_lower:
+                if count > 5 and avg_time > 0.01:
+                    suggestions.append(
+                        "CREATE INDEX IF NOT EXISTS idx_performance_settings_category "
+                        "ON performance_settings(category, setting_key)"
+                    )
+
+        return list(set(suggestions))  # Remove duplicates
+
+    def clear_cache(self) -> None:
+        """Clear query cache."""
+        if self.query_cache:
+            self.query_cache.clear()
+
+    def reset_metrics(self) -> None:
+        """Reset all performance metrics."""
+        with self._lock:
+            self.query_metrics.clear()
+            self.query_stats.clear()
+            self.hot_settings.clear()
+            self.hot_queries.clear()
+            self.memory_samples.clear()
+            self.start_time = datetime.now()
+
+    def get_cache_stats(self) -> Dict[str, Any]:
+        """Get query cache statistics."""
+        if self.query_cache:
+            return self.query_cache.get_stats()
+        return {'enabled': False}
+
+    def export_metrics(self, filepath: str) -> bool:
+        """
+        Export performance metrics to JSON file.
+
+        Args:
+            filepath: Target file path
+
+        Returns:
+            True if export successful
+        """
+        try:
+            import json
+
+            with self._lock:
+                metrics_data = {
+                    'export_timestamp': datetime.now().isoformat(),
+                    'monitoring_duration_minutes': (datetime.now() - self.start_time).total_seconds() / 60,
+                    'performance_stats': self.get_performance_stats().__dict__,
+                    'hot_settings': dict(self.hot_settings),
+                    'hot_queries': dict(self.hot_queries),
+                    'cache_stats': self.get_cache_stats(),
+                    'memory_trend': self.get_memory_trend(),
+                    'recent_slow_queries': [
+                        {
+                            'query': m.query_text[:200],
+                            'execution_time': m.execution_time,
+                            'timestamp': m.timestamp.isoformat()
+                        }
+                        for m in self.query_metrics
+                        if m.execution_time > self.slow_query_threshold
+                    ][-20:]  # Last 20 slow queries
+                }
+
+            with open(filepath, 'w') as f:
+                json.dump(metrics_data, f, indent=2, default=str)
+
+            self.logger.info(f"Performance metrics exported to {filepath}")
+            return True
+
+        except Exception as e:
+            self.logger.error(f"Failed to export metrics: {e}")
+            return False
+
+
+# Global performance monitor instance
+_performance_monitor = None
+_monitor_lock = threading.Lock()
+
+
+def get_performance_monitor() -> PerformanceMonitor:
+    """Get the global performance monitor instance."""
+    global _performance_monitor
+
+    if _performance_monitor is None:
+        with _monitor_lock:
+            if _performance_monitor is None:
+                _performance_monitor = PerformanceMonitor()
+
+    return _performance_monitor
+
+
+def initialize_performance_monitoring(enable_caching: bool = True,
+                                      cache_size: int = 1000,
+                                      slow_query_threshold: float = 0.1) -> PerformanceMonitor:
+    """
+    Initialize global performance monitoring.
+
+    Args:
+        enable_caching: Whether to enable query result caching
+        cache_size: Maximum number of cached queries
+        slow_query_threshold: Threshold in seconds for slow query detection
+
+    Returns:
+        PerformanceMonitor instance
+    """
+    global _performance_monitor
+
+    with _monitor_lock:
+        _performance_monitor = PerformanceMonitor(
+            enable_caching=enable_caching,
+            cache_size=cache_size,
+            slow_query_threshold=slow_query_threshold
+        )
+
     return _performance_monitor
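
For reference, a minimal usage sketch of the module shown above, driving the monitor, cache, and pool together from application code. The settings.db path and the core_settings query are illustrative assumptions, not values taken from the package (core_settings is only referenced by the index suggestions in optimize_indexes):

from core.performance_monitor import ConnectionPool, initialize_performance_monitoring

# Configure the global monitor with a small cache and a 50 ms slow-query threshold.
monitor = initialize_performance_monitoring(enable_caching=True, cache_size=500,
                                            slow_query_threshold=0.05)
monitor.set_connection_pool(ConnectionPool("settings.db", max_connections=4))  # hypothetical db path

query = "SELECT value FROM core_settings WHERE key = ?"  # illustrative query
params = ("theme",)

# monitor_query yields (connection, cached_result): on a cache hit the
# connection is None and the cached rows come back directly; on a miss a
# pooled connection is yielded and the caller stores the result itself.
with monitor.monitor_query(query, params) as (conn, cached):
    rows = cached if cached is not None else conn.execute(query, params).fetchall()
    if cached is None:
        monitor.cache_query_result(query, params, rows)

monitor.record_setting_access("theme")
print(monitor.get_performance_stats(window_minutes=5))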